diff --git a/scorpio-broker/AllInOneRunner/.gitignore b/scorpio-broker/AllInOneRunner/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/AllInOneRunner/hs_err_pid11763.log b/scorpio-broker/AllInOneRunner/hs_err_pid11763.log new file mode 100644 index 0000000000000000000000000000000000000000..c47d44b227b6cba7557ad9940ab6ebbfed79e27e --- /dev/null +++ b/scorpio-broker/AllInOneRunner/hs_err_pid11763.log @@ -0,0 +1,20 @@ +# +# A fatal error has been detected by the Java Runtime Environment: +# +# SIGSEGV (0xb) at pc=0x00007fbc3b5bc401, pid=11763, tid=11779 +# +# JRE version: OpenJDK Runtime Environment (11.0.6+10) (build 11.0.6+10-post-Ubuntu-1ubuntu118.04.1) +# Java VM: OpenJDK 64-Bit Server VM (11.0.6+10-post-Ubuntu-1ubuntu118.04.1, mixed mode, tiered, compressed oops, g1 gc, linux-amd64) +# Problematic frame: +# C [libjimage.so+0x2401] +# +# Core dump will be written. Default location: Core dumps may be processed with "/usr/share/apport/apport %p %s %c %d %P" (or dumping to /home/bla/ScorpioBroker/AllInOneRunner/core.11763) +# +# If you would like to submit a bug report, please visit: +# https://bugs.launchpad.net/ubuntu/+source/openjdk-lts +# + +--------------- S U M M A R Y ------------ + +Command Line: -agentlib:jdwp=transport=dt_socket,suspend=y,address=localhost:35417 -Dspring.profiles.active=dev -javaagent:/home/bla/Downloads/eclipse-jee-2019-12-R-linux-gtk-x86_64/eclipse/configuration/org.eclipse.osgi/405/0/.cp/lib/javaagent-shaded.jar -Dfile.encoding=UTF-8 eu.neclab.ngsildbroker.runner.Runner + diff --git a/scorpio-broker/AllInOneRunner/hs_err_pid5675.log b/scorpio-broker/AllInOneRunner/hs_err_pid5675.log new file mode 100644 index 0000000000000000000000000000000000000000..d0531bbc0a584d0dfaaba8b722c22dcac3a7d752 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/hs_err_pid5675.log @@ -0,0 +1,21 @@ +# +# A fatal error has been detected by the Java Runtime Environment: +# +# SIGSEGV (0xb) at pc=0x00007f89bf5bc401, pid=5675, tid=5691 +# +# JRE version: OpenJDK Runtime Environment (11.0.6+10) (build 11.0.6+10-post-Ubuntu-1ubuntu118.04.1) +# Java VM: OpenJDK 64-Bit Server VM (11.0.6+10-post-Ubuntu-1ubuntu118.04.1, mixed mode, tiered, compressed oops, g1 gc, linux-amd64) +# Problematic frame: +# C [libjimage.so+0x2401] +# +# Core dump will be written. 
Default location: Core dumps may be processed with "/usr/share/apport/apport %p %s %c %d %P" (or dumping to /home/bla/ScorpioBroker/AllInOneRunner/core.5675) +# +# If you would like to submit a bug report, please visit: +# https://bugs.launchpad.net/ubuntu/+source/openjdk-lts +# + +--------------- S U M M A R Y ------------ + +Command Line: -agentlib:jdwp=transport=dt_socket,suspend=y,address=localhost:33055 -Dspring.profiles.active=dev -javaagent:/home/bla/Downloads/eclipse-jee-2019-12-R-linux-gtk-x86_64/eclipse/configuration/org.eclipse.osgi/405/0/.cp/lib/javaagent-shaded.jar -Dfile.encoding=UTF-8 eu.neclab.ngsildbroker.runner.Runner + +Host: Intel(R) Core(TM) i5-6300HQ CPU @ 2.30GHz, 4 cores, 4G, \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/pom.xml b/scorpio-broker/AllInOneRunner/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..cc479f0781197d46e76eac2995e8b9838eb84755 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/pom.xml @@ -0,0 +1,86 @@ + + 4.0.0 + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../BrokerParent + + AllInOneRunner + 1.0.0-SNAPSHOT + jar + + + + + + skipDefault + + + docker-aaio + + + + org.springframework.boot + spring-boot-maven-plugin + + + + repackage + + + + + + + + + + + eu.neclab.ngsildbroker + InfoServer + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + AtContextServer + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + EntityManager + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + HistoryManager + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + QueryManager + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + RegistryManager + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + StorageManager + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + SubscriptionManager + 1.0.0-SNAPSHOT + + + + \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/java/eu/neclab/ngsildbroker/runner/Runner.java b/scorpio-broker/AllInOneRunner/src/main/java/eu/neclab/ngsildbroker/runner/Runner.java new file mode 100644 index 0000000000000000000000000000000000000000..a176d72f6ad791baa6e00058b36fb6d37771f0de --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/java/eu/neclab/ngsildbroker/runner/Runner.java @@ -0,0 +1,20 @@ +package eu.neclab.ngsildbroker.runner; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.ComponentScan.Filter; + +import org.springframework.context.annotation.FilterType; + +@SpringBootApplication +@ComponentScan(basePackages = { + "eu.neclab.ngsildbroker.*" }, excludeFilters = @Filter(type = FilterType.REGEX, pattern = { + "eu.neclab.ngsildbroker.commons.*" })) +//@Import(KafkaConfig.class) +public class Runner { + + public static void main(String[] args) throws Exception{ + SpringApplication.run(Runner.class, args); + } +} diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/application-aaio.yml b/scorpio-broker/AllInOneRunner/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..4eaba240583c808ea7a65df22be58e6715117c35 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/application-aaio.yml @@ -0,0 +1,192 @@ +append: + overwrite: noOverwrite +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ +bootstrap: + servers: kafka:9092 +broker: + id: FedBroker1 + parent: + location: + url: SELF + +query: + result: + topic: 
QUERY_RESULT + topic: QUERY + + +csource: + stopListenerIfDbFails: false + topic: CONTEXT_SOURCE + registry: + topic: CONTEXT_REGISTRY + source: + topic: CONTEXT_SOURCE + notification: + topic: CONTEXT_SOURCE_NOTIFICATION + query: + topic: CONTEXT_REGISTRY_QUERY + result: + topic: CONTEXT_REGISTRY_QUERY_RESULT + +csources: + registration: + topic: CONTEXT_REGISTRY +defaultLimit: 50 +directDbConnection: false +entity: + append: + topic: ENTITY_APPEND + create: + topic: ENTITY_CREATE + delete: + topic: ENTITY_DELETE + update: + topic: ENTITY_UPDATE + index: + topic: ENTITY_INDEX + keyValues: + topic: KVENTITY + stopListenerIfDbFails: false + temporal: + stopListenerIfDbFails: false + topic: TEMPORALENTITY + topic: ENTITY + withoutSysAttrs: + topic: ENTITY_WITHOUT_SYSATTRS + +submanager: + subscription: + topic: SUBSCRIPTIONS + +kafka: + replytimeout: 10000 +management: + endpoint: + restart: + enabled: true + endpoints: + web: + exposure: + include: "*" +max: + request: + size: 104857600 +maxLimit: 500 +ngb: + debugmode: false + + +reader: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Reader + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + enabled: true + +server: + port: 1025 + tomcat: + max: + threads: 200 +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + bindings: + ENTITY_WRITE_CHANNEL: + destination: ENTITY + contentType: application/json + KVENTITY_WRITE_CHANNEL: + destination: KVENTITY + contentType: application/json + ENTITY_WITHOUT_SYSATTRS_WRITE_CHANNEL: + destination: ENTITY_WITHOUT_SYSATTRS + contentType: application/json + CREATE_WRITE_CHANNEL: + destination: ENTITY_CREATE + contentType: application/json + APPEND_WRITE_CHANNEL: + destination: ENTITY_APPEND + contentType: application/json + UPDATE_WRITE_CHANNEL: + destination: ENTITY_UPDATE + contentType: application/json + DELETE_WRITE_CHANNEL: + destination: ENTITY_DELETE + contentType: application/json + CONTEXT_REGISTRY_WRITE_CHANNEL: + destination: CONTEXT_REGISTRY + contentType: application/json + CONTEXT_REGISTRY_UPDATE_CHANNEL: + destination: CONTEXT_UPDATE + contentType: application/json + INDEX_WRITE_CHANNEL: + destination: ENTITY_INDEX + contentType: application/json + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + TEMPORAL_ENTITY_WRITE_CHANNEL: + destination: TEMPORALENTITY + contentType: application/json + SUBSCRIPTIONS_WRITE_CHANNEL: + destination: SUBSCRIPTIONS + contentType: application/json + CSOURCE_SUBSCRIPTION_WRITE_CHANNEL: + destination: CSOURCE_SUBSCRIPTIONS + contentType: application/json + CSOURCE_REGISTRATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE + contentType: application/json + CSOURCE_NOTIFICATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE_NOTIFICATION + contentType: application/json + application.name: aio-runner + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + flyway: + baselineOnMigrate: true + kafka: + admin: + properties: + cleanup: + policy: compact + main: + lazy-initialization: true + allow-bean-definition-overriding: true + +selfhostcorecontext: http://localhost:9090/corecontext +writer: + datasource: + hikari: + 
connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true + diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/application-dev.yml b/scorpio-broker/AllInOneRunner/src/main/resources/application-dev.yml new file mode 100644 index 0000000000000000000000000000000000000000..1c48fc6566c8082a39a8d31181f9ffd6a283b810 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/application-dev.yml @@ -0,0 +1,290 @@ +append: + overwrite: noOverwrite +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ +bootstrap: + servers: localhost:9092 +broker: + id: FedBroker1 + parent: + location: + url: SELF + +query: + result: + topic: QUERY_RESULT + topic: QUERY + +csource: + stopListenerIfDbFails: false + topic: CONTEXT_SOURCE + registry: + topic: CONTEXT_REGISTRY + source: + topic: CONTEXT_SOURCE + notification: + topic: CONTEXT_SOURCE_NOTIFICATION + query: + topic: CONTEXT_REGISTRY_QUERY + result: + topic: CONTEXT_REGISTRY_QUERY_RESULT + +csources: + registration: + topic: CONTEXT_REGISTRY +defaultLimit: 50 +directDbConnection: false +entity: + append: + topic: ENTITY_APPEND + create: + topic: ENTITY_CREATE + delete: + topic: ENTITY_DELETE + update: + topic: ENTITY_UPDATE + index: + topic: ENTITY_INDEX + keyValues: + topic: KVENTITY + stopListenerIfDbFails: false + temporal: + stopListenerIfDbFails: false + topic: TEMPORALENTITY + topic: ENTITY + withoutSysAttrs: + topic: ENTITY_WITHOUT_SYSATTRS + +submanager: + subscription: + topic: SUBSCRIPTIONS + +kafka: + replytimeout: 10000 +management: + endpoint: + restart: + enabled: true + endpoints: + web: + exposure: + include: "*" +max: + request: + size: 104857600 +maxLimit: 500 +ngb: + debugmode: false + + +reader: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Reader + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + enabled: true + +spring: + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ENTITY_WRITE_CHANNEL: + destination: ENTITY + contentType: application/json + KVENTITY_WRITE_CHANNEL: + destination: KVENTITY + contentType: application/json + ENTITY_WITHOUT_SYSATTRS_WRITE_CHANNEL: + destination: ENTITY_WITHOUT_SYSATTRS + contentType: application/json + CREATE_WRITE_CHANNEL: + destination: ENTITY_CREATE + contentType: application/json + APPEND_WRITE_CHANNEL: + destination: ENTITY_APPEND + contentType: application/json + UPDATE_WRITE_CHANNEL: + destination: ENTITY_UPDATE + contentType: application/json + DELETE_WRITE_CHANNEL: + destination: ENTITY_DELETE + contentType: application/json + CONTEXT_REGISTRY_WRITE_CHANNEL: + destination: CONTEXT_REGISTRY + contentType: application/json + CONTEXT_REGISTRY_UPDATE_CHANNEL: + destination: CONTEXT_UPDATE + contentType: application/json + INDEX_WRITE_CHANNEL: + destination: ENTITY_INDEX + contentType: application/json + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + TEMPORAL_ENTITY_WRITE_CHANNEL: + destination: TEMPORALENTITY + contentType: application/json + SUBSCRIPTIONS_WRITE_CHANNEL: + destination: SUBSCRIPTIONS + contentType: application/json + CSOURCE_SUBSCRIPTION_WRITE_CHANNEL: + destination: CSOURCE_SUBSCRIPTIONS + 
contentType: application/json + CSOURCE_REGISTRATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE + contentType: application/json + CSOURCE_NOTIFICATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE_NOTIFICATION + contentType: application/json + application.name: aio-runner + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + flyway: + baselineOnMigrate: true + kafka: + admin: + properties: + cleanup: + policy: compact + main: + lazy-initialization: true + allow-bean-definition-overriding: true + + +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true + +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET + + + +server: + port: 27015 + tomcat: + max: + threads:200 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: '*' + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +selfhostcorecontext: http://localhost:9090/corecontext +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user +logging: + level: + root: ERROR + diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/application-local.yml 
b/scorpio-broker/AllInOneRunner/src/main/resources/application-local.yml new file mode 100644 index 0000000000000000000000000000000000000000..663ecb275ddac9fc1d33ca57b85cb1d235d74526 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/application-local.yml @@ -0,0 +1,292 @@ +append: + overwrite: noOverwrite +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ +bootstrap: + servers: localhost:9092 +broker: + id: FedBroker1 + parent: + location: + url: SELF + +query: + result: + topic: QUERY_RESULT + topic: QUERY +batchoperations: + maxnumber: + create: 1000 + update: 1000 + upsert: 1000 + delete: 1000 + +csource: + stopListenerIfDbFails: false + topic: CONTEXT_SOURCE + registry: + topic: CONTEXT_REGISTRY + source: + topic: CONTEXT_SOURCE + notification: + topic: CONTEXT_SOURCE_NOTIFICATION + query: + topic: CONTEXT_REGISTRY_QUERY + result: + topic: CONTEXT_REGISTRY_QUERY_RESULT + +csources: + registration: + topic: CONTEXT_REGISTRY +defaultLimit: 50 +directDbConnection: false +entity: + append: + topic: ENTITY_APPEND + create: + topic: ENTITY_CREATE + delete: + topic: ENTITY_DELETE + update: + topic: ENTITY_UPDATE + index: + topic: ENTITY_INDEX + keyValues: + topic: KVENTITY + stopListenerIfDbFails: false + temporal: + stopListenerIfDbFails: false + topic: TEMPORALENTITY + topic: ENTITY + withoutSysAttrs: + topic: ENTITY_WITHOUT_SYSATTRS + +submanager: + subscription: + topic: SUBSCRIPTIONS + +kafka: + replytimeout: 10000 +management: + endpoint: + restart: + enabled: true + endpoints: + web: + exposure: + include: "*" +max: + request: + size: 104857600 +maxLimit: 500 +ngb: + debugmode: false + + +reader: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Reader + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + enabled: true + +spring: + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ENTITY_WRITE_CHANNEL: + destination: ENTITY + contentType: application/json + KVENTITY_WRITE_CHANNEL: + destination: KVENTITY + contentType: application/json + ENTITY_WITHOUT_SYSATTRS_WRITE_CHANNEL: + destination: ENTITY_WITHOUT_SYSATTRS + contentType: application/json + CREATE_WRITE_CHANNEL: + destination: ENTITY_CREATE + contentType: application/json + APPEND_WRITE_CHANNEL: + destination: ENTITY_APPEND + contentType: application/json + UPDATE_WRITE_CHANNEL: + destination: ENTITY_UPDATE + contentType: application/json + DELETE_WRITE_CHANNEL: + destination: ENTITY_DELETE + contentType: application/json + CONTEXT_REGISTRY_WRITE_CHANNEL: + destination: CONTEXT_REGISTRY + contentType: application/json + CONTEXT_REGISTRY_UPDATE_CHANNEL: + destination: CONTEXT_UPDATE + contentType: application/json + INDEX_WRITE_CHANNEL: + destination: ENTITY_INDEX + contentType: application/json + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + TEMPORAL_ENTITY_WRITE_CHANNEL: + destination: TEMPORALENTITY + contentType: application/json + SUBSCRIPTIONS_WRITE_CHANNEL: + destination: SUBSCRIPTIONS + contentType: application/json + CSOURCE_SUBSCRIPTION_WRITE_CHANNEL: + destination: CSOURCE_SUBSCRIPTIONS + contentType: application/json + CSOURCE_REGISTRATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE + contentType: application/json + CSOURCE_NOTIFICATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE_NOTIFICATION + contentType: application/json + application.name: 
aio-runner + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + flyway: + baselineOnMigrate: true + kafka: + admin: + properties: + cleanup: + policy: compact + main: + lazy-initialization: true + allow-bean-definition-overriding: true + + +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true + +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + +selfhostcorecontext: http://localhost:9090/corecontext +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET + + + +server: + port: 27015 + tomcat: + max: + threads:200 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: '*' + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user + diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/application.yml b/scorpio-broker/AllInOneRunner/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..0703379a4a0f10d97cf2e6af411da8aa1a454d82 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/application.yml @@ -0,0 +1,292 @@ +append: + overwrite: noOverwrite 
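+# Default (no-profile) configuration: Kafka and Postgres are addressed by the host names
+# "kafka" and "postgres" (presumably the docker-compose service names used with the
+# docker-aaio build profile), whereas application-dev.yml and application-local.yml
+# point at localhost for a locally running Kafka/Postgres instead.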
+atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ +bootstrap: + servers: kafka:9092 +broker: + id: FedBroker1 + parent: + location: + url: SELF + +query: + result: + topic: QUERY_RESULT + topic: QUERY +batchoperations: + maxnumber: + create: 1000 + update: 1000 + upsert: 1000 + delete: 1000 + +csource: + stopListenerIfDbFails: false + topic: CONTEXT_SOURCE + registry: + topic: CONTEXT_REGISTRY + source: + topic: CONTEXT_SOURCE + notification: + topic: CONTEXT_SOURCE_NOTIFICATION + query: + topic: CONTEXT_REGISTRY_QUERY + result: + topic: CONTEXT_REGISTRY_QUERY_RESULT + +csources: + registration: + topic: CONTEXT_REGISTRY +defaultLimit: 50 +directDbConnection: false +entity: + append: + topic: ENTITY_APPEND + create: + topic: ENTITY_CREATE + delete: + topic: ENTITY_DELETE + update: + topic: ENTITY_UPDATE + index: + topic: ENTITY_INDEX + keyValues: + topic: KVENTITY + stopListenerIfDbFails: false + temporal: + stopListenerIfDbFails: false + topic: TEMPORALENTITY + topic: ENTITY + withoutSysAttrs: + topic: ENTITY_WITHOUT_SYSATTRS + +submanager: + subscription: + topic: SUBSCRIPTIONS + +kafka: + replytimeout: 10000 +management: + endpoint: + restart: + enabled: true + endpoints: + web: + exposure: + include: "*" +max: + request: + size: 104857600 +maxLimit: 500 +ngb: + debugmode: false + + +reader: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Reader + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + enabled: true + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + bindings: + ENTITY_WRITE_CHANNEL: + destination: ENTITY + contentType: application/json + KVENTITY_WRITE_CHANNEL: + destination: KVENTITY + contentType: application/json + ENTITY_WITHOUT_SYSATTRS_WRITE_CHANNEL: + destination: ENTITY_WITHOUT_SYSATTRS + contentType: application/json + CREATE_WRITE_CHANNEL: + destination: ENTITY_CREATE + contentType: application/json + APPEND_WRITE_CHANNEL: + destination: ENTITY_APPEND + contentType: application/json + UPDATE_WRITE_CHANNEL: + destination: ENTITY_UPDATE + contentType: application/json + DELETE_WRITE_CHANNEL: + destination: ENTITY_DELETE + contentType: application/json + CONTEXT_REGISTRY_WRITE_CHANNEL: + destination: CONTEXT_REGISTRY + contentType: application/json + CONTEXT_REGISTRY_UPDATE_CHANNEL: + destination: CONTEXT_UPDATE + contentType: application/json + INDEX_WRITE_CHANNEL: + destination: ENTITY_INDEX + contentType: application/json + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + TEMPORAL_ENTITY_WRITE_CHANNEL: + destination: TEMPORALENTITY + contentType: application/json + SUBSCRIPTIONS_WRITE_CHANNEL: + destination: SUBSCRIPTIONS + contentType: application/json + CSOURCE_SUBSCRIPTION_WRITE_CHANNEL: + destination: CSOURCE_SUBSCRIPTIONS + contentType: application/json + CSOURCE_REGISTRATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE + contentType: application/json + CSOURCE_NOTIFICATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE_NOTIFICATION + contentType: application/json + application.name: aio-runner + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + flyway: + baselineOnMigrate: true + kafka: + 
admin: + properties: + cleanup: + policy: compact + main: + lazy-initialization: true + allow-bean-definition-overriding: true + + +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true + +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET + + + +server: + port: 27015 + tomcat: + max: + threads:200 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: '*' + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user +selfhostcorecontext: http://localhost:9090/corecontext diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/c-sources.yml b/scorpio-broker/AllInOneRunner/src/main/resources/config/c-sources.yml new file mode 100644 index 0000000000000000000000000000000000000000..158dcc0a49b85da02d5fabc2c56caf69abee0597 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/c-sources.yml @@ -0,0 +1,57 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET + - api: /* + 
role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /* + role: + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + PATCH + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: / + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: / + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/entity-manager.yml b/scorpio-broker/AllInOneRunner/src/main/resources/config/entity-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..081df638cbdacb705a84c53495fdc8b1063e0253 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/entity-manager.yml @@ -0,0 +1,52 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /*/attrs + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /*/attrs/* + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/history-manager.yml b/scorpio-broker/AllInOneRunner/src/main/resources/config/history-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..3a2be36c6024778c8b220f673564ae4ce343ce25 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/history-manager.yml @@ -0,0 +1,68 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /entities + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /entities + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET + - api: /entities/* + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + method: + GET + - api: /entities/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /entities/*/attrs/ + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /entities/*/attrs/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /entities/*/attrs/*/* + role: + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + PATCH + - api: /entities/*/attrs/*/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/ngsi-ld-core-context.jsonld b/scorpio-broker/AllInOneRunner/src/main/resources/config/ngsi-ld-core-context.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..fb43242d5a69cf2b5c0e7f22a45e85c1750f1368 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/ngsi-ld-core-context.jsonld @@ -0,0 +1,158 @@ +{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "id": "@id", + "type": "@type", + "value": "https://uri.etsi.org/ngsi-ld/hasValue", + "object": { + "@id": "https://uri.etsi.org/ngsi-ld/hasObject", + 
"@type":"@id" + }, + "Property": "https://uri.etsi.org/ngsi-ld/Property", + "Relationship": "https://uri.etsi.org/ngsi-ld/Relationship", + "DateTime": "https://uri.etsi.org/ngsi-ld/DateTime", + "Date": "https://uri.etsi.org/ngsi-ld/Date", + "Time": "https://uri.etsi.org/ngsi-ld/Time", + "createdAt": { + "@id": "https://uri.etsi.org/ngsi-ld/createdAt", + "@type": "DateTime" + }, + "modifiedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/modifiedAt", + "@type": "DateTime" + }, + "observedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/observedAt", + "@type": "DateTime" + }, + "datasetId": { + "@id": "https://uri.etsi.org/ngsi-ld/datasetId", + "@type": "@id" + }, + "instanceId": { + "@id": "https://uri.etsi.org/ngsi-ld/instanceId", + "@type": "@id" + }, + "unitCode": "https://uri.etsi.org/ngsi-ld/unitCode", + "location": "https://uri.etsi.org/ngsi-ld/location", + "observationSpace": "https://uri.etsi.org/ngsi-ld/observationSpace", + "operationSpace": "https://uri.etsi.org/ngsi-ld/operationSpace", + "GeoProperty": "https://uri.etsi.org/ngsi-ld/GeoProperty", + "TemporalProperty": "https://uri.etsi.org/ngsi-ld/TemporalProperty", + "ContextSourceRegistration": "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration", + "Subscription": "https://uri.etsi.org/ngsi-ld/Subscription", + "Notification": "https://uri.etsi.org/ngsi-ld/Notification", + "ContextSourceNotification": "https://uri.etsi.org/ngsi-ld/ContextSourceNotification", + "title": "https://uri.etsi.org/ngsi-ld/title", + "detail": "https://uri.etsi.org/ngsi-ld/detail", + "idPattern": "https://uri.etsi.org/ngsi-ld/idPattern", + "name": "https://uri.etsi.org/ngsi-ld/name", + "description": "https://uri.etsi.org/ngsi-ld/description", + "information": "https://uri.etsi.org/ngsi-ld/information", + "observationInterval": "https://uri.etsi.org/ngsi-ld/observationInterval", + "managementInterval": "https://uri.etsi.org/ngsi-ld/managementInterval", + "expires": { + "@id": "https://uri.etsi.org/ngsi-ld/expires", + "@type": "DateTime" + }, + "endpoint": "https://uri.etsi.org/ngsi-ld/endpoint", + "entities": "https://uri.etsi.org/ngsi-ld/entities", + "properties": { + "@id": "https://uri.etsi.org/ngsi-ld/properties", + "@type": "@vocab" + }, + "relationships": { + "@id": "https://uri.etsi.org/ngsi-ld/relationships", + "@type": "@vocab" + }, + "start": { + "@id": "https://uri.etsi.org/ngsi-ld/start", + "@type": "DateTime" + }, + "end": { + "@id": "https://uri.etsi.org/ngsi-ld/end", + "@type": "DateTime" + }, + "watchedAttributes":{ + "@id": "https://uri.etsi.org/ngsi-ld/watchedAttributes", + "@type": "@vocab" + }, + "timeInterval": "https://uri.etsi.org/ngsi-ld/timeInterval", + "q": "https://uri.etsi.org/ngsi-ld/q", + "geoQ": "https://uri.etsi.org/ngsi-ld/geoQ", + "csf": "https://uri.etsi.org/ngsi-ld/csf", + "isActive": "https://uri.etsi.org/ngsi-ld/isActive", + "notification": "https://uri.etsi.org/ngsi-ld/notification", + "status": "https://uri.etsi.org/ngsi-ld/status", + "throttling": "https://uri.etsi.org/ngsi-ld/throttling", + "temporalQ": "https://uri.etsi.org/ngsi-ld/temporalQ", + "geometry": "https://uri.etsi.org/ngsi-ld/geometry", + "coordinates": "https://uri.etsi.org/ngsi-ld/coordinates", + "georel": "https://uri.etsi.org/ngsi-ld/georel", + "geoproperty": "https://uri.etsi.org/ngsi-ld/geoproperty", + "attributes": { + "@id": "https://uri.etsi.org/ngsi-ld/attributes", + "@type": "@vocab" + }, + "format": "https://uri.etsi.org/ngsi-ld/format", + "timesSent": "https://uri.etsi.org/ngsi-ld/timesSent", + "lastNotification":{ + "@id": 
"https://uri.etsi.org/ngsi-ld/lastNotification", + "@type": "DateTime" + }, + "lastFailure":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastFailure", + "@type": "DateTime" + }, + "lastSuccess":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastSuccess", + "@type": "DateTime" + }, + "uri": "https://uri.etsi.org/ngsi-ld/uri", + "accept": "https://uri.etsi.org/ngsi-ld/accept", + "success": { + "@id": "https://uri.etsi.org/ngsi-ld/success", + "@type": "@id" + }, + "errors": "https://uri.etsi.org/ngsi-ld/errors", + "error": "https://uri.etsi.org/ngsi-ld/error", + "entityId": { + "@id": "https://uri.etsi.org/ngsi-ld/entityId", + "@type": "@id" + }, + "updated": "https://uri.etsi.org/ngsi-ld/updated", + "unchanged": "https://uri.etsi.org/ngsi-ld/unchanged", + "attributeName": "https://uri.etsi.org/ngsi-ld/attributeName", + "reason": "https://uri.etsi.org/ngsi-ld/reason", + "timerel": "https://uri.etsi.org/ngsi-ld/timerel", + "time": { + "@id": "https://uri.etsi.org/ngsi-ld/time", + "@type": "DateTime" + }, + "endTime": { + "@id": "https://uri.etsi.org/ngsi-ld/endTime", + "@type": "DateTime" + }, + "timeproperty": "https://uri.etsi.org/ngsi-ld/timeproperty", + "subscriptionId": { + "@id": "https://uri.etsi.org/ngsi-ld/subscriptionId", + "@type": "@id" + }, + "notifiedAt":{ + "@id": "https://uri.etsi.org/ngsi-ld/notifiedAt", + "@type": "DateTime" + }, + "data": "https://uri.etsi.org/ngsi-ld/data", + "triggerReason": "https://uri.etsi.org/ngsi-ld/triggerReason", + "values":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasValues", + "@container": "@list" + }, + "objects":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasObjects", + "@type": "@id", + "@container": "@list" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/query-manager.yml b/scorpio-broker/AllInOneRunner/src/main/resources/config/query-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..485eab6e15cdeeb10ecfe8eb49464f856d57271a --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/query-manager.yml @@ -0,0 +1,21 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/subscription-manager.yml b/scorpio-broker/AllInOneRunner/src/main/resources/config/subscription-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..0ae20cc00530d47fc0c34d9347dd010dd5b0ad37 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/subscription-manager.yml @@ -0,0 +1,50 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: / + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: / + role: + - USER_G + - USER_PG + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + GET + - api: /* + role: + - USER_G + - USER_PG + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + GET + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /* + role: + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + PATCH \ 
No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/config/testmanager.yml b/scorpio-broker/AllInOneRunner/src/main/resources/config/testmanager.yml new file mode 100644 index 0000000000000000000000000000000000000000..081df638cbdacb705a84c53495fdc8b1063e0253 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/config/testmanager.yml @@ -0,0 +1,52 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /*/attrs + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /*/attrs/* + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.1__entity.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
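+        -- data holds the expanded JSON-LD document, so the jsonb "path as text" operator #>>
+        -- pulls the first @type entry and the @value of createdAt/modifiedAt into the
+        -- dedicated columns that the indexes below are built on.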
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.2__registry.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX 
i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + 
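+-- Illustrative sketch (not executed; the id and endpoint below are made up): once the trigger
+-- below is installed, an insert such as
+--   INSERT INTO csource (id, data) VALUES (
+--     'urn:ngsi-ld:ContextSourceRegistration:csr1',
+--     '{"@id": "urn:ngsi-ld:ContextSourceRegistration:csr1",
+--       "@type": ["https://uri.etsi.org/ngsi-ld/ContextSourceRegistration"],
+--       "https://uri.etsi.org/ngsi-ld/endpoint": [{"@value": "http://example.org/csource"}]}'
+--   );
+-- stores the row with type and endpoint already extracted from the expanded JSON-LD payload.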
+CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.3__temporal.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- 
do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git 
a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ 
b/scorpio-broker/AllInOneRunner/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
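+            -- Illustrative example (hypothetical registration data): an entities entry whose expanded form
+            -- contains "@id": "urn:ngsi-ld:Vehicle:A456" is stored with entity_id set and entity_idpattern NULL,
+            -- while an entry that carries only an idPattern such as "urn:ngsi-ld:Vehicle:.*" is stored with
+            -- entity_id NULL and the pattern in entity_idpattern.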
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/AllInOneRunner/src/main/resources/ngsi-ld-core-context.jsonld b/scorpio-broker/AllInOneRunner/src/main/resources/ngsi-ld-core-context.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..fb43242d5a69cf2b5c0e7f22a45e85c1750f1368 --- /dev/null +++ b/scorpio-broker/AllInOneRunner/src/main/resources/ngsi-ld-core-context.jsonld @@ -0,0 +1,158 @@ +{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "id": "@id", + "type": "@type", + "value": "https://uri.etsi.org/ngsi-ld/hasValue", + "object": { + "@id": "https://uri.etsi.org/ngsi-ld/hasObject", + "@type":"@id" + }, + "Property": "https://uri.etsi.org/ngsi-ld/Property", + "Relationship": "https://uri.etsi.org/ngsi-ld/Relationship", + "DateTime": "https://uri.etsi.org/ngsi-ld/DateTime", + "Date": "https://uri.etsi.org/ngsi-ld/Date", + "Time": "https://uri.etsi.org/ngsi-ld/Time", + "createdAt": { + "@id": "https://uri.etsi.org/ngsi-ld/createdAt", + "@type": "DateTime" + }, + "modifiedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/modifiedAt", + "@type": "DateTime" + }, + "observedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/observedAt", + "@type": "DateTime" + }, + "datasetId": { + "@id": "https://uri.etsi.org/ngsi-ld/datasetId", + "@type": "@id" + }, + "instanceId": { + "@id": "https://uri.etsi.org/ngsi-ld/instanceId", + "@type": "@id" + }, + "unitCode": "https://uri.etsi.org/ngsi-ld/unitCode", + "location": "https://uri.etsi.org/ngsi-ld/location", + "observationSpace": "https://uri.etsi.org/ngsi-ld/observationSpace", + "operationSpace": "https://uri.etsi.org/ngsi-ld/operationSpace", + "GeoProperty": "https://uri.etsi.org/ngsi-ld/GeoProperty", + "TemporalProperty": "https://uri.etsi.org/ngsi-ld/TemporalProperty", + "ContextSourceRegistration": "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration", + "Subscription": "https://uri.etsi.org/ngsi-ld/Subscription", + "Notification": "https://uri.etsi.org/ngsi-ld/Notification", + "ContextSourceNotification": "https://uri.etsi.org/ngsi-ld/ContextSourceNotification", + "title": "https://uri.etsi.org/ngsi-ld/title", + "detail": "https://uri.etsi.org/ngsi-ld/detail", + "idPattern": "https://uri.etsi.org/ngsi-ld/idPattern", + "name": "https://uri.etsi.org/ngsi-ld/name", + "description": "https://uri.etsi.org/ngsi-ld/description", + "information": "https://uri.etsi.org/ngsi-ld/information", + "observationInterval": "https://uri.etsi.org/ngsi-ld/observationInterval", + "managementInterval": "https://uri.etsi.org/ngsi-ld/managementInterval", + "expires": { + "@id": "https://uri.etsi.org/ngsi-ld/expires", + "@type": "DateTime" + }, + "endpoint": 
"https://uri.etsi.org/ngsi-ld/endpoint", + "entities": "https://uri.etsi.org/ngsi-ld/entities", + "properties": { + "@id": "https://uri.etsi.org/ngsi-ld/properties", + "@type": "@vocab" + }, + "relationships": { + "@id": "https://uri.etsi.org/ngsi-ld/relationships", + "@type": "@vocab" + }, + "start": { + "@id": "https://uri.etsi.org/ngsi-ld/start", + "@type": "DateTime" + }, + "end": { + "@id": "https://uri.etsi.org/ngsi-ld/end", + "@type": "DateTime" + }, + "watchedAttributes":{ + "@id": "https://uri.etsi.org/ngsi-ld/watchedAttributes", + "@type": "@vocab" + }, + "timeInterval": "https://uri.etsi.org/ngsi-ld/timeInterval", + "q": "https://uri.etsi.org/ngsi-ld/q", + "geoQ": "https://uri.etsi.org/ngsi-ld/geoQ", + "csf": "https://uri.etsi.org/ngsi-ld/csf", + "isActive": "https://uri.etsi.org/ngsi-ld/isActive", + "notification": "https://uri.etsi.org/ngsi-ld/notification", + "status": "https://uri.etsi.org/ngsi-ld/status", + "throttling": "https://uri.etsi.org/ngsi-ld/throttling", + "temporalQ": "https://uri.etsi.org/ngsi-ld/temporalQ", + "geometry": "https://uri.etsi.org/ngsi-ld/geometry", + "coordinates": "https://uri.etsi.org/ngsi-ld/coordinates", + "georel": "https://uri.etsi.org/ngsi-ld/georel", + "geoproperty": "https://uri.etsi.org/ngsi-ld/geoproperty", + "attributes": { + "@id": "https://uri.etsi.org/ngsi-ld/attributes", + "@type": "@vocab" + }, + "format": "https://uri.etsi.org/ngsi-ld/format", + "timesSent": "https://uri.etsi.org/ngsi-ld/timesSent", + "lastNotification":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastNotification", + "@type": "DateTime" + }, + "lastFailure":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastFailure", + "@type": "DateTime" + }, + "lastSuccess":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastSuccess", + "@type": "DateTime" + }, + "uri": "https://uri.etsi.org/ngsi-ld/uri", + "accept": "https://uri.etsi.org/ngsi-ld/accept", + "success": { + "@id": "https://uri.etsi.org/ngsi-ld/success", + "@type": "@id" + }, + "errors": "https://uri.etsi.org/ngsi-ld/errors", + "error": "https://uri.etsi.org/ngsi-ld/error", + "entityId": { + "@id": "https://uri.etsi.org/ngsi-ld/entityId", + "@type": "@id" + }, + "updated": "https://uri.etsi.org/ngsi-ld/updated", + "unchanged": "https://uri.etsi.org/ngsi-ld/unchanged", + "attributeName": "https://uri.etsi.org/ngsi-ld/attributeName", + "reason": "https://uri.etsi.org/ngsi-ld/reason", + "timerel": "https://uri.etsi.org/ngsi-ld/timerel", + "time": { + "@id": "https://uri.etsi.org/ngsi-ld/time", + "@type": "DateTime" + }, + "endTime": { + "@id": "https://uri.etsi.org/ngsi-ld/endTime", + "@type": "DateTime" + }, + "timeproperty": "https://uri.etsi.org/ngsi-ld/timeproperty", + "subscriptionId": { + "@id": "https://uri.etsi.org/ngsi-ld/subscriptionId", + "@type": "@id" + }, + "notifiedAt":{ + "@id": "https://uri.etsi.org/ngsi-ld/notifiedAt", + "@type": "DateTime" + }, + "data": "https://uri.etsi.org/ngsi-ld/data", + "triggerReason": "https://uri.etsi.org/ngsi-ld/triggerReason", + "values":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasValues", + "@container": "@list" + }, + "objects":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasObjects", + "@type": "@id", + "@container": "@list" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} diff --git a/scorpio-broker/BrokerParent/.gitignore b/scorpio-broker/BrokerParent/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/BrokerParent/.gitignore @@ -0,0 +1,4 @@ +/target/ 
+/.settings/ +.classpath +.project diff --git a/scorpio-broker/BrokerParent/pom.xml b/scorpio-broker/BrokerParent/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..671bff752c305b8caaa21e14ff579d5cd98c1f5b --- /dev/null +++ b/scorpio-broker/BrokerParent/pom.xml @@ -0,0 +1,64 @@ + + 4.0.0 + + BrokerParent + pom + + eu.neclab.ngsildbroker + OverallParent + 1.0.0-SNAPSHOT + ../OverallParent + + + + + + + + eu.neclab.ngsildbroker + Commons + 1.0.0-SNAPSHOT + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + com.thoughtworks.xstream + xstream + 1.4.19 + + + + + + default + + + !skipDefault + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + repackage + + + + + + + + + + + diff --git a/scorpio-broker/CONTRIBUTING.ja.md b/scorpio-broker/CONTRIBUTING.ja.md new file mode 100644 index 0000000000000000000000000000000000000000..86ea0e1cdcbc5f64e82efaa81ae3d66fdbc59633 --- /dev/null +++ b/scorpio-broker/CONTRIBUTING.ja.md @@ -0,0 +1,9 @@ +## Contributing to the Scorpio Broker + +### Sign the Contributor License Agreement + +If you are interested in contributing to the Scorpio Broker, please download the Contributor License Agreement listed below +and send the signed agreement by e-mail to scorpio-support@listserv.neclab.eu. + +- [ScorpioBroker Entity Contributor License Agreement](https://github.com/scorpiobroker/scorpiobroker/blob/development/ScorpioBroker-Entity.pdf) +- [ScorpioBroker Individual Contributor License Agreement](https://github.com/scorpiobroker/scorpiobroker/blob/development/ScorpioBroker-Individual.pdf) diff --git a/scorpio-broker/CONTRIBUTING.md b/scorpio-broker/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..3210b3ae3851159f9b8a4f30ff85f6181e52b5d4 --- /dev/null +++ b/scorpio-broker/CONTRIBUTING.md @@ -0,0 +1,12 @@ +## Contributing to the ScorpioBroker + + +### Sign your contributor license agreement + +If you are interested in making a contribution to the ScorpioBroker, please download the contributor license agreement listed below and send us your signed agreement via email to scorpio-support@listserv.neclab.eu + + +[ScorpioBroker Entity Contributor License Agreement](https://github.com/scorpiobroker/scorpiobroker/blob/development/ScorpioBroker-Entity.pdf) + + +[ScorpioBroker Individual Contributor License Agreement](https://github.com/scorpiobroker/scorpiobroker/blob/development/ScorpioBroker-Individual.pdf) diff --git a/scorpio-broker/CREDITS b/scorpio-broker/CREDITS new file mode 100644 index 0000000000000000000000000000000000000000..264bd7fdf5ebf2a306eb28b951d9ee43bc662c8c --- /dev/null +++ b/scorpio-broker/CREDITS @@ -0,0 +1,12 @@ +# Contributors in no specific order + +The scorpio development team: +Benjamin Hebgen, Parwinder Singh, Everton Luís Berz, Martin Bauer, Naveen Singh Bisht, Mohd Adeeb Khan, Kailash Adhikari, Amit Kumar Raghav, Arpit Tayal +Benjamin Hebgen +Martin Bauer +Naveen Singh Bisht +Amit Kumar Raghav +Arpit Tayal +Pawan Kumar +Michael Schulz +Jason Fox diff --git a/scorpio-broker/Commons/.gitignore b/scorpio-broker/Commons/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/Commons/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/Commons/pom.xml b/scorpio-broker/Commons/pom.xml new file mode 100644 index
0000000000000000000000000000000000000000..eefc800e233011ad59a6534a6b092c902644d372 --- /dev/null +++ b/scorpio-broker/Commons/pom.xml @@ -0,0 +1,74 @@ + + 4.0.0 + + eu.neclab.ngsildbroker + OverallParent + 1.0.0-SNAPSHOT + ../OverallParent + + Commons + 1.0.0-SNAPSHOT + jar + + + org.apache.httpcomponents + httpclient + + + org.apache.httpcomponents + httpcore + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + com.google.code.gson + gson + + + com.github.jsonld-java + jsonld-java + 0.13.0 + + + + com.google.guava + guava + 25.1-jre + + + org.springframework.cloud + spring-cloud-starter-oauth2 + + + + org.springframework.cloud + spring-cloud-starter-config + + + org.springframework + spring-jdbc + + + + com.github.filosganga + geogson-core + 1.2.21 + + + com.github.filosganga + geogson-jts + 1.2.21 + + + + \ No newline at end of file diff --git a/scorpio-broker/Commons/resources/application.yml b/scorpio-broker/Commons/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..acf7b8225f67c2435032e5648bfd3ec5fe81607a --- /dev/null +++ b/scorpio-broker/Commons/resources/application.yml @@ -0,0 +1,3 @@ +context: + default: + url: https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld \ No newline at end of file diff --git a/scorpio-broker/Commons/resources/log4j2-spring.xml b/scorpio-broker/Commons/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..50ea7ce9ef24ae626ac20a7947b5301d38b7066d --- /dev/null +++ b/scorpio-broker/Commons/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/AppConstants.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/AppConstants.java new file mode 100644 index 0000000000000000000000000000000000000000..c39d8f3bd8f9c26e8f1b85558afc13a38d96a7c9 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/AppConstants.java @@ -0,0 +1,48 @@ +package eu.neclab.ngsildbroker.commons.constants; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * @version 1.0 + * @date 12-Jul-2018 + */ + +public class AppConstants { + + // entities URL for + public final static String ENTITES_URL = "/ngsi-ld/v1/entities/"; + public final static int ENTITIES_URL_ID = 0; + // csource URL + public final static String CSOURCE_URL = "/ngsi-ld/v1/csourceRegistrations/"; + public final static int CSOURCE_URL_ID = 1; + //history + public final static String HISTORY_URL="/ngsi-ld/v1/temporal/entities/"; + public final static int HISTORY_URL_ID = 2; + //subscriptions + public final static String SUBSCRIPTIONS_URL="/ngsi-ld/v1/subscriptions/"; + public final static int SUBSCRIPTIONS_URL_ID = 3; + public static final int BATCH_URL_ID = 4; + public final static int INTERNAL_CALL_ID = 5; + + public final static String NGB_APPLICATION_JSON="application/json"; + public final static String NGB_APPLICATION_NQUADS="application/n-quads"; + public final static String NGB_APPLICATION_JSONLD="application/ld+json"; + public final static String NGB_APPLICATION_GENERIC ="application/*"; + public final static String NGB_GENERIC_GENERIC ="*/*"; + + //allowed geometry types in queries params. 
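+	// Illustrative geo query (hypothetical values) using the standard NGSI-LD parameters georel, geometry and coordinates:
+	//   GET /ngsi-ld/v1/entities/?type=Vehicle&georel=near;maxDistance==2000&geometry=Point&coordinates=[8.68,49.41]
+	// The list below holds the geometry names accepted here in upper case.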
+ public final static List NGB_ALLOWED_GEOM_LIST=new ArrayList(Arrays.asList("POINT","POLYGON")); + + + public final static byte[] NULL_BYTES = "null".getBytes(); + public static final String CORE_CONTEXT_URL_SUFFIX = "ngsi-ld-core-context"; + + //constants for swagger + public final static String SWAGGER_WEBSITE_LINK = "https://github.com/ScorpioBroker/ScorpioBroker"; + public final static String SWAGGER_CONTACT_LINK = "https://github.com/ScorpioBroker/ScorpioBroker"; + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/DBConstants.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/DBConstants.java new file mode 100644 index 0000000000000000000000000000000000000000..977110326d33daf52147363d3449fa58d7374a3d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/DBConstants.java @@ -0,0 +1,95 @@ +package eu.neclab.ngsildbroker.commons.constants; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public class DBConstants { + + public final static String DBTABLE_ENTITY = "entity"; + public final static String DBTABLE_CSOURCE = "csource"; + public final static String DBTABLE_CSOURCE_INFO = "csourceinformation"; + public final static String DBTABLE_TEMPORALENTITY = "temporalentity"; + public final static String DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE= "temporalentityattrinstance"; + + public final static String DBCOLUMN_DATA = "data"; + public final static String DBCOLUMN_KVDATA = "kvdata"; + public final static String DBCOLUMN_DATA_WITHOUT_SYSATTRS = "data_without_sysattrs"; + public final static String DBCOLUMN_ID = "id"; + public final static String DBCOLUMN_TYPE = "type"; + public final static String DBCOLUMN_CREATED_AT = "createdat"; + public final static String DBCOLUMN_MODIFIED_AT = "modifiedat"; + public final static String DBCOLUMN_OBSERVED_AT = "observedat"; + public final static String DBCOLUMN_LOCATION = "location"; + public final static String DBCOLUMN_OBSERVATION_SPACE = "observationspace"; + public final static String DBCOLUMN_OPERATION_SPACE = "operationspace"; + + public final static Map NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING = initNgsildToSqlReservedPropertiesMapping(); + + public static Map initNgsildToSqlReservedPropertiesMapping() { + Map map = new HashMap<>(); + map.put(NGSIConstants.JSON_LD_ID, DBCOLUMN_ID); + map.put(NGSIConstants.JSON_LD_TYPE, DBCOLUMN_TYPE); + map.put(NGSIConstants.NGSI_LD_CREATED_AT, DBCOLUMN_CREATED_AT); + map.put(NGSIConstants.NGSI_LD_MODIFIED_AT, DBCOLUMN_MODIFIED_AT); + // the type conversion (from geometry to geojson text) changes the format (i.e. 
+ // remove spaces in geojson), so it is better to use the original data +// map.put(NGSIConstants.NGSI_LD_LOCATION, DBCOLUMN_LOCATION); +// map.put(NGSIConstants.NGSI_LD_OBSERVATION_SPACE, DBCOLUMN_OBSERVATION_SPACE); +// map.put(NGSIConstants.NGSI_LD_OPERATION_SPACE, DBCOLUMN_OPERATION_SPACE); + return Collections.unmodifiableMap(map); + } + + public final static Map NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO = initNgsildToSqlReservedPropertiesMappingGeo(); + + public static Map initNgsildToSqlReservedPropertiesMappingGeo() { + Map map = new HashMap<>(); + map.put(NGSIConstants.NGSI_LD_LOCATION, DBCOLUMN_LOCATION); + map.put(NGSIConstants.NGSI_LD_OBSERVATION_SPACE, DBCOLUMN_OBSERVATION_SPACE); + map.put(NGSIConstants.NGSI_LD_OPERATION_SPACE, DBCOLUMN_OPERATION_SPACE); + return Collections.unmodifiableMap(map); + } + + public final static String SQLQUERY_EQUAL = "="; + public final static String SQLQUERY_UNEQUAL = "<>"; + public final static String SQLQUERY_GREATEREQ = ">="; + public final static String SQLQUERY_GREATER = ">"; + public final static String SQLQUERY_LESSEQ = "<="; + public final static String SQLQUERY_LESS = "<"; + + public final static Map NGSILD_TO_SQL_OPERATORS_MAPPING = initNgsildToSqlOperatorsMapping(); + + public static Map initNgsildToSqlOperatorsMapping() { + Map map = new HashMap<>(); + map.put(NGSIConstants.QUERY_EQUAL, SQLQUERY_EQUAL); + map.put(NGSIConstants.QUERY_UNEQUAL, SQLQUERY_UNEQUAL); + map.put(NGSIConstants.QUERY_GREATEREQ, SQLQUERY_GREATEREQ); + map.put(NGSIConstants.QUERY_GREATER, SQLQUERY_GREATER); + map.put(NGSIConstants.QUERY_LESSEQ, SQLQUERY_LESSEQ); + map.put(NGSIConstants.QUERY_LESS, SQLQUERY_LESS); + return Collections.unmodifiableMap(map); + } + + public final static String POSTGIS_NEAR = "ST_DWithin"; + public final static String POSTGIS_WITHIN = "ST_Within"; + public final static String POSTGIS_CONTAINS = "ST_Contains"; + public final static String POSTGIS_OVERLAPS = "ST_Overlaps"; + public final static String POSTGIS_INTERSECTS = "ST_Intersects"; + public final static String POSTGIS_EQUALS = "ST_Equals"; + public final static String POSTGIS_DISJOINT = "ST_Disjoint"; + + public final static Map NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING = initNgsildToPostgisGeoOperatorsMapping(); + + public static Map initNgsildToPostgisGeoOperatorsMapping() { + Map map = new HashMap<>(); + map.put(NGSIConstants.GEO_REL_NEAR, POSTGIS_NEAR); + map.put(NGSIConstants.GEO_REL_WITHIN, POSTGIS_WITHIN); + map.put(NGSIConstants.GEO_REL_CONTAINS, POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_OVERLAPS, POSTGIS_OVERLAPS); + map.put(NGSIConstants.GEO_REL_INTERSECTS, POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_EQUALS, POSTGIS_EQUALS); + map.put(NGSIConstants.GEO_REL_DISJOINT, POSTGIS_DISJOINT); + return Collections.unmodifiableMap(map); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/KafkaConstants.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/KafkaConstants.java new file mode 100644 index 0000000000000000000000000000000000000000..43f7dc179f8f4cea45e030950fb2d015c1cacf0c --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/KafkaConstants.java @@ -0,0 +1,20 @@ +package eu.neclab.ngsildbroker.commons.constants; + +public interface KafkaConstants { + + public static final String ENTITY_TOPIC = "ENTITY"; + public static final String QUERY_TOPIC = "QUERY"; + public static final String QUERY_RESULT_TOPIC = "QUERYRESULT"; + public 
static final String CREATE_TOPIC = "CREATE"; + public static final String UPDATE_TOPIC = "UPDATE"; + public static final String APPEND_TOPIC = "APPEND"; + public static final String DELETE_TOPIC = "DELETE"; + public static final String SUBSCRIPTIONS_TOPIC = "SUBSCRIPTIONS"; + public static final String CSOURCE_SUBSCRIPTIONS_TOPIC = "CSOURCE_SUBSCRIPTIONS"; + public static final String ATCONTEXT_TOPIC = "ATCONTEXT"; + public static final String PAGINATION_TOPIC = "PAGINATION"; + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/NGSIConstants.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/NGSIConstants.java new file mode 100644 index 0000000000000000000000000000000000000000..b2c9f3a4489c28a431cc4e5600621657361e7a9e --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/constants/NGSIConstants.java @@ -0,0 +1,218 @@ +package eu.neclab.ngsildbroker.commons.constants; + +import java.util.HashMap; +import java.util.List; + +import java.util.Arrays; + +public interface NGSIConstants { + public final static String GEO_REL_TYPE = "type"; + public final static String GEO_REL_REL = "rel"; + public final static String GEO_REL_NEAR = "near"; + public final static String GEO_REL_WITHIN = "within"; + public final static String GEO_REL_CONTAINS = "contains"; + public final static String GEO_REL_INTERSECTS = "intersects"; + public final static String GEO_REL_EQUALS = "equals"; + public final static String GEO_REL_DISJOINT = "disjoint"; + public final static String GEO_REL_OVERLAPS = "overlaps"; + public final static String GEO_REL_MAX_DISTANCE = "maxDistance"; + public final static String GEO_REL_MIN_DISTANCE = "minDistance"; + + public final static String GEO_TYPE_POINT = "Point"; + public final static String GEO_TYPE_POLYGON = "Polygon"; + public final static String GEO_TYPE_LINESTRING = "LineString"; + public final static String GEO_TYPE_MULTI_POLYGON = "MultiPolygon"; + + public final static String TIME_REL_BEFORE = "before"; + public final static String TIME_REL_AFTER = "after"; + public final static String TIME_REL_BETWEEN = "between"; + + public final static String CSOURCE_INFORMATION = "information"; + public final static String CSOURCE_DESCRIPTION = "description"; + public final static String CSOURCE_ENDPOINT = "endpoint"; + public final static String CSOURCE_EXPIRES = "expires"; + public final static String CSOURCE_NAME = "name"; + public final static String CSOURCE_TYPE = "type"; + public final static String CSOURCE_TIMESTAMP = "timestamp"; + public final static String CSOURCE_COORDINATES = "coordinates"; + + public final static String OBJECT = "object"; + public final static String JSON_LD_ID = "@id"; + public final static String JSON_LD_TYPE = "@type"; + public final static String JSON_LD_VALUE = "@value"; + public final static String JSON_LD_CONTEXT = "@context"; + public final static String NGSI_LD_DEFAULT_PREFIX = "https://uri.etsi.org/ngsi-ld/default-context/"; + public final static String NGSI_LD_RELATIONSHIP = "https://uri.etsi.org/ngsi-ld/Relationship"; + public final static String NGSI_LD_PROPERTY = "https://uri.etsi.org/ngsi-ld/Property"; + public final static String NGSI_LD_HAS_VALUE = "https://uri.etsi.org/ngsi-ld/hasValue"; + public final static String NGSI_LD_HAS_OBJECT = "https://uri.etsi.org/ngsi-ld/hasObject"; + public final static String NGSI_LD_COORDINATES = "https://uri.etsi.org/ngsi-ld/coordinates"; + public final static String NGSI_LD_GEOPROPERTY = 
"https://uri.etsi.org/ngsi-ld/GeoProperty"; + public final static String NGSI_LD_GEOPROPERTY_SHORT = "GeoProperty"; + public final static String NGSI_LD_LOCATION = "https://uri.etsi.org/ngsi-ld/location"; + public final static String NGSI_LD_LOCATION_SHORT = "location"; + public final static String NGSI_LD_CREATED_AT = "https://uri.etsi.org/ngsi-ld/createdAt"; + public final static String NGSI_LD_MODIFIED_AT = "https://uri.etsi.org/ngsi-ld/modifiedAt"; + public final static String NGSI_LD_OBSERVED_AT = "https://uri.etsi.org/ngsi-ld/observedAt"; + public final static String NGSI_LD_OBSERVATION_SPACE = "https://uri.etsi.org/ngsi-ld/observationSpace"; + public final static String NGSI_LD_OPERATION_SPACE = "https://uri.etsi.org/ngsi-ld/operationSpace"; + public final static String NGSI_LD_ATTRIBUTES = "https://uri.etsi.org/ngsi-ld/attributes"; + public final static String NGSI_LD_DATE_TIME = "https://uri.etsi.org/ngsi-ld/DateTime"; + public final static String NGSI_LD_DATE = "https://uri.etsi.org/ngsi-ld/Date"; + public final static String NGSI_LD_TIME = "https://uri.etsi.org/ngsi-ld/Time"; + public final static String NGSI_LD_INFORMATION = "https://uri.etsi.org/ngsi-ld/information"; + public final static String NGSI_LD_RELATIONSHIPS = "https://uri.etsi.org/ngsi-ld/relationships"; + public final static String NGSI_LD_PROPERTIES = "https://uri.etsi.org/ngsi-ld/properties"; + public final static String NGSI_LD_INSTANCE_ID = "https://uri.etsi.org/ngsi-ld/instanceId"; + + public final static String NGSI_LD_ID_PATTERN = "https://uri.etsi.org/ngsi-ld/idPattern"; + public final static String NGSI_LD_ENTITIES = "https://uri.etsi.org/ngsi-ld/entities"; + public final static String NGSI_LD_GEOMETRY = "https://uri.etsi.org/ngsi-ld/geometry"; + public final static String NGSI_LD_GEO_QUERY = "https://uri.etsi.org/ngsi-ld/geoQ"; + public final static String NGSI_LD_ACCEPT = "https://uri.etsi.org/ngsi-ld/accept"; + public final static String NGSI_LD_URI = "https://uri.etsi.org/ngsi-ld/uri"; + public final static String NGSI_LD_ENDPOINT = "https://uri.etsi.org/ngsi-ld/endpoint"; + public final static String NGSI_LD_FORMAT = "https://uri.etsi.org/ngsi-ld/format"; + public final static String NGSI_LD_NOTIFICATION = "https://uri.etsi.org/ngsi-ld/notification"; + public final static String NGSI_LD_QUERY = "https://uri.etsi.org/ngsi-ld/q"; + public final static String NGSI_LD_WATCHED_ATTRIBUTES = "https://uri.etsi.org/ngsi-ld/watchedAttributes"; + public final static String NGSI_LD_WATCHED_ATTRIBUTES_SHORT = "watchedAttributes"; + public final static String NGSI_LD_ENTITIES_SHORT = "entities"; + public final static String NGSI_LD_ATTRIBUTES_SHORT = "attributes"; + public final static String NGSI_LD_NAME = "https://uri.etsi.org/ngsi-ld/name"; + public final static String NGSI_LD_THROTTLING = "https://uri.etsi.org/ngsi-ld/throttling"; + public final static String NGSI_LD_TIME_INTERVAL = "https://uri.etsi.org/ngsi-ld/timeInterval"; + public final static String NGSI_LD_EXPIRES = "https://uri.etsi.org/ngsi-ld/expires"; + public final static String NGSI_LD_STATUS = "https://uri.etsi.org/ngsi-ld/status"; + public final static String NGSI_LD_DESCRIPTION = "https://uri.etsi.org/ngsi-ld/description"; + public final static String NGSI_LD_GEO_REL = "https://uri.etsi.org/ngsi-ld/georel"; + public final static String NGSI_LD_TIME_STAMP = "https://uri.etsi.org/ngsi-ld/default-context/timestamp"; + public final static String NGSI_LD_TIMESTAMP_START = "https://uri.etsi.org/ngsi-ld/start"; + public final static String 
NGSI_LD_TIMESTAMP_END = "https://uri.etsi.org/ngsi-ld/end"; + public final static String NGSI_LD_POLYOGN = "https://uri.etsi.org/ngsi-ld/Polygon"; + public final static String NGSI_LD_POINT = "https://uri.etsi.org/ngsi-ld/Point"; + public final static String NGSI_LD_LINESTRING = "https://uri.etsi.org/ngsi-ld/LineString"; + public final static String NGSI_LD_SUBSCRIPTION_ID = "https://uri.etsi.org/ngsi-ld/subscriptionId"; + public final static String NGSI_LD_NOTIFIED_AT = "https://uri.etsi.org/ngsi-ld/notifiedAt"; + public final static String NGSI_LD_DATA = "https://uri.etsi.org/ngsi-ld/data"; + public final static String NGSI_LD_INTERNAL = "https://uri.etsi.org/ngsi-ld/internal"; + public final static String NGSI_LD_CSOURCE_REGISTRATION = "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration"; + public final static String NGSI_LD_CSOURCE_REGISTRATION_SHORT = "ContextSourceRegistration"; + public final static String NGSI_LD_SUBSCRIPTION = "https://uri.etsi.org/ngsi-ld/Subscription"; + public final static String NGSI_LD_SUBSCRIPTION_SHORT = "Subscription"; + public final static String NGSI_LD_LAST_NOTIFICATION = "https://uri.etsi.org/ngsi-ld/lastNotification"; + public final static String NGSI_LD_LAST_FAILURE = "https://uri.etsi.org/ngsi-ld/lastFailure "; + public final static String NGSI_LD_LAST_SUCCESS = "https://uri.etsi.org/ngsi-ld/lastSuccess"; + public final static String NGSI_LD_TIMES_SEND = "https://uri.etsi.org/ngsi-ld/timesSent"; + public final static String NGSI_LD_UNIT_CODE = "https://uri.etsi.org/ngsi-ld/unitCode"; + public final static String NGSI_LD_DATA_SET_ID = "https://uri.etsi.org/ngsi-ld/datasetId"; + public final static String NGSI_LD_IS_ACTIVE = "https://uri.etsi.org/ngsi-ld/isActive"; + // IMPORTANT! DO NOT MESS UP THIS ORDER!!! 
ONLY APPEND ON THE END NEW STUFF + public final static String[] NGSI_LD_PAYLOAD_KEYS = { JSON_LD_ID, JSON_LD_TYPE, JSON_LD_CONTEXT, + NGSI_LD_DEFAULT_PREFIX, NGSI_LD_HAS_VALUE, NGSI_LD_HAS_OBJECT, JSON_LD_VALUE, NGSI_LD_LOCATION, + NGSI_LD_CREATED_AT, NGSI_LD_MODIFIED_AT, NGSI_LD_OBSERVED_AT, NGSI_LD_OBSERVATION_SPACE, + NGSI_LD_OPERATION_SPACE, NGSI_LD_ATTRIBUTES, NGSI_LD_INFORMATION, NGSI_LD_INSTANCE_ID, NGSI_LD_COORDINATES, + NGSI_LD_ID_PATTERN, NGSI_LD_ENTITIES, NGSI_LD_GEOMETRY, NGSI_LD_GEO_QUERY, NGSI_LD_ACCEPT, NGSI_LD_URI, + NGSI_LD_ENDPOINT, NGSI_LD_FORMAT, NGSI_LD_NOTIFICATION, NGSI_LD_QUERY, NGSI_LD_WATCHED_ATTRIBUTES, + NGSI_LD_NAME, NGSI_LD_THROTTLING, NGSI_LD_TIME_INTERVAL, NGSI_LD_EXPIRES, NGSI_LD_STATUS, + NGSI_LD_DESCRIPTION, NGSI_LD_GEO_REL, NGSI_LD_TIME_STAMP, NGSI_LD_TIMESTAMP_START, NGSI_LD_TIMESTAMP_END, + NGSI_LD_SUBSCRIPTION_ID, NGSI_LD_NOTIFIED_AT, NGSI_LD_DATA, NGSI_LD_INTERNAL, NGSI_LD_LAST_NOTIFICATION, + NGSI_LD_LAST_FAILURE, NGSI_LD_LAST_SUCCESS, NGSI_LD_TIMES_SEND, NGSI_LD_UNIT_CODE, NGSI_LD_DATA_SET_ID }; + + public final static String[] NGSI_LD_SUBSCRIPTON_PAYLOAD_KEYS = { JSON_LD_ID, JSON_LD_TYPE, JSON_LD_CONTEXT, + NGSI_LD_ENTITIES, NGSI_LD_ID_PATTERN, NGSI_LD_GEO_QUERY, NGSI_LD_NOTIFICATION, NGSI_LD_ATTRIBUTES, + NGSI_LD_ENDPOINT, NGSI_LD_ACCEPT, NGSI_LD_URI, NGSI_LD_FORMAT, NGSI_LD_QUERY, NGSI_LD_WATCHED_ATTRIBUTES, + NGSI_LD_TIMES_SEND, NGSI_LD_THROTTLING, NGSI_LD_TIME_INTERVAL, NGSI_LD_EXPIRES, NGSI_LD_STATUS, + NGSI_LD_DESCRIPTION, NGSI_LD_IS_ACTIVE, NGSI_LD_TIMESTAMP_END, NGSI_LD_TIMESTAMP_START }; + + public final static String GEO_JSON_COORDINATES = "coordinates"; + public final static String GEO_JSON_TYPE = "type"; + + public final static String VALUE = "value"; + + // Entity validation attribute types + public final static String VALID_NGSI_ATTRIBUTE_TYPES = "Relationship,Property,DateTime"; + + // url decode format + public final static String ENCODE_FORMAT = "UTF-8"; + // query parameter url + public final static String QUERY_URL = "entities/?"; + + // query parameter + public final static String QUERY_PARAMETER_TYPE = "type"; + public final static String QUERY_PARAMETER_ID = "id"; + public final static String QUERY_PARAMETER_IDPATTERN = "idPattern"; + public final static String QUERY_PARAMETER_ATTRS = "attrs"; + public final static String QUERY_PARAMETER_QUERY = "q"; + public final static String QUERY_PARAMETER_GEOREL = "georel"; + public final static String QUERY_PARAMETER_GEOMETRY = "geometry"; + public final static String QUERY_PARAMETER_COORDINATES = "coordinates"; + public final static String QUERY_PARAMETER_GEOPROPERTY = "geoproperty"; + public final static String QUERY_PARAMETER_TIMEREL = "timerel"; + public final static String QUERY_PARAMETER_OFFSET = "offset"; + public final static String QUERY_PARAMETER_LIMIT = "limit"; + public final static String QUERY_PARAMETER_QTOKEN = "qtoken"; + public final static String QUERY_PARAMETER_TIME = "time"; + public final static String QUERY_PARAMETER_ENDTIME = "endTime"; + public final static String QUERY_PARAMETER_TIMEPROPERTY = "timeproperty"; + public final static String QUERY_PARAMETER_LOCATION = "location"; + public final static String QUERY_PARAMETER_CREATED_AT = "createdAt"; + public final static String QUERY_PARAMETER_MODIFIED_AT = "modifiedAt"; + public final static String QUERY_PARAMETER_OBSERVED_AT = "observedAt"; + public final static String QUERY_PARAMETER_UNIT_CODE = "unitCode"; + public final static String QUERY_PARAMETER_DATA_SET_ID = "datasetId"; + public final static String 
QUERY_PARAMETER_OBSERVATION_SPACE = "observationspace"; + public final static String QUERY_PARAMETER_OPERATION_SPACE = "operationspace"; + public final static String QUERY_PARAMETER_GEOREL_DISTANCE = "distance"; + public final static String QUERY_PARAMETER_DEFAULT_GEOPROPERTY = NGSIConstants.NGSI_LD_LOCATION; + public final static String QUERY_PARAMETER_DEFAULT_TIMEPROPERTY = NGSIConstants.NGSI_LD_OBSERVED_AT; + public final static String QUERY_PARAMETER_OPTIONS = "options"; + public final static String QUERY_PARAMETER_OPTIONS_SYSATTRS = "sysAttrs"; + public final static String QUERY_PARAMETER_OPTIONS_KEYVALUES = "keyValues"; + public final static String QUERY_PARAMETER_OPTIONS_COMPRESS = "compress"; + public final static String QUERY_PARAMETER_OPTIONS_TEMPORALVALUES = "temporalValues"; + + public final static String QUERY_EQUAL = "=="; + public final static String QUERY_UNEQUAL = "!="; + public final static String QUERY_GREATEREQ = ">="; + public final static String QUERY_GREATER = ">"; + public final static String QUERY_LESSEQ = "<="; + public final static String QUERY_LESS = "<"; + public final static String QUERY_PATTERNOP = "~="; + public final static String QUERY_NOTPATTERNOP = "!~="; + // public final static String CHECK_QUERY_STRING_URI = "/"; + + public static final String ALLOWED_IN_DEFAULT_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss[.SSSSSS][.SSSSS][.SSSS][.SSS][.SS][.S]'Z'"; + public static final String ALLOWED_OUT_DEFAULT_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss[.SSSSSS]'Z'"; + public static final String DEFAULT_FORGIVING_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss"; + public static final String HEADER_REL_LDCONTEXT = "http://www.w3.org/ns/json-ld#context"; + + public static final HashMap HTTP_CODE_2_NGSI_ERROR = new HashMap(); + public static final List ALLOWED_GEOMETRIES = Arrays.asList("Point", "MultiPoint", "LineString", + "MultiLineString", "Polygon", "MultiPolygon"); + public static final List ALLOWED_GEOREL = Arrays.asList("near", "equals", "disjoint", "intersects", + "within", "contains", "overlaps"); + public static final List SPECIAL_PROPERTIES = Arrays.asList(NGSI_LD_CREATED_AT, NGSI_LD_OBSERVED_AT, + NGSI_LD_MODIFIED_AT, NGSI_LD_DATA_SET_ID, NGSI_LD_UNIT_CODE); + public static final String MQTT_QOS = "mqtt_qos"; + public static final String MQTT_VERSION = "mqtt_version"; + public static final String DEFAULT_DATA_SET_ID = "https://uri.etsi.org/ngsi-ld/default-data-set-id"; + public static final String NGSI_LD_ENDPOINT_REGEX = ".*\\\"https\\:\\/\\/uri\\.etsi\\.org\\/ngsi-ld\\/endpoint\\\"\\W*\\:\\W*\\[\\W*\\{\\W*\\@value\\\"\\:\\W*\\\"(http(s)*\\:\\/\\/\\S*)\\\".*"; + public static final String NGSI_LD_FORBIDDEN_KEY_CHARS_REGEX = "([\\<\\\"\\'\\=\\;\\(\\)\\>\\?\\*])"; + public static final String NGSI_LD_FORBIDDEN_KEY_CHARS = "<,\",',=,;,(,),>,?,*"; + public static final String[] VALID_SUB_ENDPOINT_SCHEMAS = { "http", "https", "mqtt", "mqtts" }; + public final static String QUERY_PARAMETER_DELETE_ALL = "deleteAll"; + public static final String NGSI_LD_NOTIFIERINFO = "https://uri.etsi.org/ngsi-ld/default-context/notifierinfo"; + public static final String NGSI_LD_MQTT_QOS = "https://uri.etsi.org/ngsi-ld/default-context/qos"; + public static final String NGSI_LD_MQTT_VERSION = "https://uri.etsi.org/ngsi-ld/default-context/version"; + public static final Integer DEFAULT_MQTT_QOS = 0; + public static final String DEFAULT_MQTT_VERSION = "mqtt5.0"; + public static final String CONTENT_TYPE = "contentType"; + public static final String ACCEPTED_LINK = "link"; + public static final String 
METADATA = "metadata"; + public static final String BODY = "body"; + public static final String MQTT_VERSION_5 = "mqtt5.0"; + public static final String MQTT_VERSION_3 = "mqtt3.1.1"; + public static final String[] VALID_MQTT_VERSION = { "mqtt5.0", "mqtt3.1.1" }; + public static final Integer[] VALID_QOS = { 0, 1, 2 }; +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Append.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Append.java new file mode 100644 index 0000000000000000000000000000000000000000..50d0f8c39dd51352b14ae42371b1a43b521efd9a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Append.java @@ -0,0 +1,24 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Append extends BaseEntityModificationOperation { + + + + public Append(Map customFlags, List data, URI id, LDContext ldContext) { + super(customFlags, data, id, ldContext); + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/AppendResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/AppendResult.java new file mode 100644 index 0000000000000000000000000000000000000000..10b35877daae30ea0a09de61cde8e3ec15691dec --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/AppendResult.java @@ -0,0 +1,72 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import com.fasterxml.jackson.databind.JsonNode; + +public class AppendResult { + + private JsonNode jsonToAppend; + private JsonNode appendedJsonFields;// = new ArrayList(); + private JsonNode finalNode; + private boolean status=false; + private byte[] json; + private byte[] jsonWithoutSysAttrs; + + public boolean getAppendResult() { + return jsonToAppend.size()==appendedJsonFields.size(); + } + + public AppendResult(JsonNode jsonToAppend,JsonNode appendedJsonFields) { + super(); + this.jsonToAppend = jsonToAppend; + this.appendedJsonFields=appendedJsonFields; + } + + public JsonNode getFinalNode() { + return finalNode; + } + + public void setFinalNode(JsonNode finalNode) { + this.finalNode = finalNode; + } + + public JsonNode getJsonToAppend() { + return jsonToAppend; + } + + public void setJsonToAppend(JsonNode jsonToAppend) { + this.jsonToAppend = jsonToAppend; + } + + public JsonNode getAppendedJsonFields() { + return appendedJsonFields; + } + + public void setAppendedJsonFields(JsonNode appendedJsonFields) { + this.appendedJsonFields = appendedJsonFields; + } + + public boolean getStatus() { + return status; + } + + public void setStatus(boolean status) { + this.status = status; + } + + public byte[] getJson() { + return json; + } + + public void setJson(byte[] json) { + this.json = json; + } + + public byte[] getJsonWithoutSysAttrs() { + return jsonWithoutSysAttrs; + } + + public void setJsonWithoutSysAttrs(byte[] jsonWithoutSysAttrs) { + this.jsonWithoutSysAttrs = jsonWithoutSysAttrs; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseEntityModificationOperation.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseEntityModificationOperation.java new file mode 100644 index 
0000000000000000000000000000000000000000..90b9e2cad9d9a206a3eef314620252f8d43b5b40 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseEntityModificationOperation.java @@ -0,0 +1,72 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class BaseEntityModificationOperation extends BaseOperation { + + private List data; + private URI id; + private LDContext ldContext; + + + + public BaseEntityModificationOperation(Map customFlags, List data, URI id, + LDContext ldContext) { + super(customFlags); + this.data = data; + this.id = id; + this.ldContext = ldContext; + } + + + + public void finalize() throws Throwable { + + } + + + + public List getData() { + return data; + } + + + + public void setData(List data) { + this.data = data; + } + + + + public URI getId() { + return id; + } + + + + public void setId(URI id) { + this.id = id; + } + + + + public LDContext getLdContext() { + return ldContext; + } + + + + public void setLdContext(LDContext ldContext) { + this.ldContext = ldContext; + } + + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseEntry.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseEntry.java new file mode 100644 index 0000000000000000000000000000000000000000..85c81c4bbead1f114de1fa213e6ab4bb0fade796 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseEntry.java @@ -0,0 +1,122 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.List; +import java.util.UUID; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; + +public abstract class BaseEntry { + protected String name; + protected Long observedAt = -1l; + protected List properties; + private Object refToAccessControl; + protected List relationships; + protected int timeSeriesId; + protected String type; + private Long createdAt = -1l; + private Long modifiedAt = -1l; + protected String dataSetId; + + public BaseEntry(String dataSetId) { + this.dataSetId = dataSetId; + if (this.dataSetId == null || this.dataSetId.trim().isEmpty()) { + this.dataSetId = NGSIConstants.DEFAULT_DATA_SET_ID; + } + } + + public String getDataSetId() { + return dataSetId; + } + + public void setDataSetId(String dataSetId) { + this.dataSetId = dataSetId; + if (this.dataSetId == null || this.dataSetId.trim().isEmpty()) { + this.dataSetId = UUID.randomUUID().toString(); + } + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public long getObservedAt() { + return observedAt; + } + + public void setObservedAt(Long observedAt) { + if (observedAt == null) { + this.observedAt = -1l; + } else { + this.observedAt = observedAt; + } + } + + public List getProperties() { + return properties; + } + + public void setProperties(List properties) { + this.properties = properties; + } + + public Object getRefToAccessControl() { + return refToAccessControl; + } + + public void setRefToAccessControl(Object refToAccessControl) { + this.refToAccessControl = refToAccessControl; + } + + public List getRelationships() { + return relationships; + } + + public void setRelationships(List relationships) { + this.relationships = relationships; + } + + public int getTimeSeriesId() { + return timeSeriesId; + } + 
+ public void setTimeSeriesId(int timeSeriesId) { + this.timeSeriesId = timeSeriesId; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public long getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(Long createdAt) { + if (createdAt == null) { + this.createdAt = -1l; + } else { + this.createdAt = createdAt; + } + } + + public long getModifiedAt() { + return modifiedAt; + } + + public void setModifiedAt(Long modifiedAt) { + if (modifiedAt == null) { + this.modifiedAt = -1l; + } else { + this.modifiedAt = modifiedAt; + } + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseOperation.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseOperation.java new file mode 100644 index 0000000000000000000000000000000000000000..68f366cd99c99273c8f0bfcf7a3d16b3485b1f7d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseOperation.java @@ -0,0 +1,38 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class BaseOperation { + + protected Map customFlags; + + public BaseOperation() { + + } + + public BaseOperation(Map customFlags){ + this.customFlags = customFlags; + + } + + + public Map getCustomFlags() { + return customFlags; + } + + + public void setCustomFlags(Map customFlags) { + this.customFlags = customFlags; + } + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseProperty.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseProperty.java new file mode 100644 index 0000000000000000000000000000000000000000..c86f9bc796fcb54e18e94cc66d84cb21a4e98faa --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseProperty.java @@ -0,0 +1,84 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.HashMap; +import java.util.List; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public abstract class BaseProperty { + + protected URI id; + protected String type; + + public BaseProperty(){ + + } + + + abstract public boolean isMultiValue(); + + public URI getId() { + return id; + } + public String getIdString() { + return id.toString(); + } + + public void setId(URI id) { + this.id = id; + } + + + public String getType() { + return type; + } + + + public void setType(String type) { + this.type = type; + } + + public abstract HashMap getEntries(); + + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((type == null) ?
0 : type.hashCode()); + return result; + } + + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + BaseProperty other = (BaseProperty) obj; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + return true; + } + + + + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseResult.java new file mode 100644 index 0000000000000000000000000000000000000000..16c6a4062b6e94ce2b4901bce80207b35bc330f4 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BaseResult.java @@ -0,0 +1,91 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class BaseResult { + + protected String errorMsg; + protected ErrorType errorType; + protected int shortErrorMsg; + protected boolean success; + + + + + public BaseResult(String errorMsg, ErrorType errorType, int shortErrorMsg, boolean success) { + super(); + this.errorMsg = errorMsg; + this.errorType = errorType; + this.shortErrorMsg = shortErrorMsg; + this.success = success; + } + + + + + public String getErrorMsg() { + return errorMsg; + } + + + + + public void setErrorMsg(String errorMsg) { + this.errorMsg = errorMsg; + } + + + + + public ErrorType getErrorType() { + return errorType; + } + + + + + public void setErrorType(ErrorType errorType) { + this.errorType = errorType; + } + + + + + public int getShortErrorMsg() { + return shortErrorMsg; + } + + + + + public void setShortErrorMsg(int shortErrorMsg) { + this.shortErrorMsg = shortErrorMsg; + } + + + + + public boolean isSuccess() { + return success; + } + + + + + public void setSuccess(boolean success) { + this.success = success; + } + + + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BatchFailure.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BatchFailure.java new file mode 100644 index 0000000000000000000000000000000000000000..05954a717cb0c830f9d1d6d4e8dca3f4924bdd2f --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BatchFailure.java @@ -0,0 +1,29 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +public class BatchFailure { + private String entityId; + private RestResponse ProblemDetails; + + + public BatchFailure(String entityId, RestResponse details) { + super(); + this.entityId = entityId; + this.ProblemDetails = details; + } + public String getEntityId() { + return entityId; + } + public void setEntityId(String entityId) { + this.entityId = entityId; + } + public RestResponse getDetails() { + return ProblemDetails; + } + public void setDetails(RestResponse details) { + this.ProblemDetails = details; + } + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BatchResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BatchResult.java new file mode 100644 
index 0000000000000000000000000000000000000000..7da799525d3a85f4c7ecf36932e1e9a85feeb773 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/BatchResult.java @@ -0,0 +1,31 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.ArrayList; + +public class BatchResult { + + private ArrayList success = new ArrayList(); + private ArrayList fails = new ArrayList(); + + public void addSuccess(String entityId) { + success.add(entityId); + } + + public void addFail(BatchFailure fail) { + fails.add(fail); + } + + public ArrayList getSuccess() { + return success; + } + public ArrayList getFails() { + return fails; + } + + + + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceNotification.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceNotification.java new file mode 100644 index 0000000000000000000000000000000000000000..c0a75b2e8da0f8f09011878c90468cb76729e9e3 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceNotification.java @@ -0,0 +1,144 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.Date; +import java.util.List; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.enums.TriggerReason; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class CSourceNotification extends CSourceQueryResult { + + + private URI id; + private Date notifiedAt; + private TriggerReason triggerReason; + private String type = "ContextSource Notification"; + private URI subscriptionId; + + + public CSourceNotification(URI id, URI subscriptionId, Date notifiedAt, TriggerReason triggerReason, List data, String errorMsg, ErrorType errorType, int shortErrorMsg, + boolean success) { + super(data, errorMsg, errorType, shortErrorMsg, success); + this.id = id; + this.notifiedAt = notifiedAt; + this.triggerReason = triggerReason; + this.subscriptionId = subscriptionId; + } + + + + + + + + public URI getSubscriptionId() { + return subscriptionId; + } + + + + + + + + public void setSubscriptionId(URI subscriptionId) { + this.subscriptionId = subscriptionId; + } + + + + + + + + public URI getId() { + return id; + } + + + + + + + + public void setId(URI id) { + this.id = id; + } + + + + + + + + public Date getNotifiedAt() { + return notifiedAt; + } + + + + + + + + public void setNotifiedAt(Date notifiedAt) { + this.notifiedAt = notifiedAt; + } + + + + + + + + public TriggerReason getTriggerReason() { + return triggerReason; + } + + + + + + + + public void setTriggerReason(TriggerReason triggerReason) { + this.triggerReason = triggerReason; + } + + + + + + + + public String getType() { + return type; + } + + + + + + + + public void setType(String type) { + this.type = type; + } + + + + + + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceQuery.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceQuery.java new file mode 100644 index 0000000000000000000000000000000000000000..a2f3bd18ba19a663181936feffc9767d93e369f7 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceQuery.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI;
+import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class CSourceQuery extends Query { + + + + public CSourceQuery(Map customFlags, List attributeNames, List entities, + String ldContext, LDGeoQuery ldGeoQuery, String ldQuery, LDTemporalQuery ldTempQuery, + List requestorList) { + super(customFlags, attributeNames, entities, ldContext, ldGeoQuery, ldQuery, ldTempQuery, requestorList); + // TODO Auto-generated constructor stub + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceQueryResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceQueryResult.java new file mode 100644 index 0000000000000000000000000000000000000000..d7b80efd864fcd76ba5940ae81bc05838f1c43fd --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceQueryResult.java @@ -0,0 +1,41 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.List; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class CSourceQueryResult extends BaseResult { + + public CSourceQueryResult(List data, String errorMsg, ErrorType errorType, int shortErrorMsg, boolean success) { + super(errorMsg, errorType, shortErrorMsg, success); + this.data = data; + } + + + + public List getData() { + return data; + } + + + + public void setData(List data) { + this.data = data; + } + + + + private List data; + + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceRegistration.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceRegistration.java new file mode 100644 index 0000000000000000000000000000000000000000..2416aac0d648b22422dde9633ae2fbe4b13e99e7 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CSourceRegistration.java @@ -0,0 +1,236 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.Date; +import java.util.List; +import com.github.filosganga.geogson.model.Geometry; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class CSourceRegistration { + + private String description; + private URI endpoint; + private Long expires; + private URI id; + + private List information; + //private GeoProperty location; + private Geometry location; // csource location is not the same as entity location. 
entity location is a GeoProperty, csource location is just a geojson value (string) + private String name; + private TimeInterval timestamp; + private boolean isInternal = false; + + /** + * a + */ + private String type = "CSourceRegistration"; + + public CSourceRegistration() { + + } + + public void finalize() throws Throwable { + + } + + public CSourceRegistration update(CSourceRegistration updateBean) throws Exception { + if (updateBean == null) { + throw new Exception("null update payload."); + } + if (updateBean.getDescription() != null) { + this.setDescription(updateBean.getDescription()); + } + if (updateBean.getEndpoint() != null) { + this.setEndpoint(updateBean.getEndpoint()); + } + if (updateBean.getExpires() != null) { + this.setExpires(updateBean.getExpires()); + } + if (updateBean.getId() != null) { + this.setId(updateBean.getId()); + } + if (updateBean.getInformation() != null) { + // TODO : support for updating nested element i.e EntityInfo. + // this.getInformation().addAll(updateBean.getInformation()); + this.setInformation(updateBean.getInformation()); + } + if (updateBean.getLocation() != null) { + this.setLocation(updateBean.getLocation()); + } + if (updateBean.getName() != null) { + this.setName(updateBean.getName()); + } + if (updateBean.getTimestamp() != null) { + this.setTimestamp(updateBean.getTimestamp()); + } + + return this; + } + + + + public boolean isInternal() { + return isInternal; + } + + public void setInternal(boolean isInternal) { + this.isInternal = isInternal; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public URI getEndpoint() { + return endpoint; + } + + public void setEndpoint(URI endpoint) { + this.endpoint = endpoint; + } + + public Long getExpires() { + return expires; + } + + public void setExpires(Long expires) { + this.expires = expires; + } + + public URI getId() { + return id; + } + + public void setId(URI id) { + this.id = id; + } + + public List getInformation() { + return information; + } + + public void setInformation(List information) { + this.information = information; + } + + public Geometry getLocation() { + return location; + } + + public void setLocation(Geometry location) { + this.location = location; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public TimeInterval getTimestamp() { + return timestamp; + } + + public void setTimestamp(TimeInterval timestamp) { + this.timestamp = timestamp; + } + + @Override + public String toString() { + return "CSourceRegistration [description=" + description + ", endpoint=" + endpoint + ", expires=" + expires + + ", id=" + id + ", information=" + information + ", location=" + location + ", name=" + name + + ", timestamp=" + timestamp + ", type=" + type + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((description == null) ? 0 : description.hashCode()); + result = prime * result + ((endpoint == null) ? 0 : endpoint.hashCode()); + result = prime * result + ((expires == null) ? 0 : expires.hashCode()); + result = prime * result + ((id == null) ? 0 : id.hashCode()); + result = prime * result + ((information == null) ? 0 : information.hashCode()); + result = prime * result + ((location == null) ? 
0 : location.hashCode()); + result = prime * result + ((name == null) ? 0 : name.hashCode()); + result = prime * result + ((timestamp == null) ? 0 : timestamp.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + CSourceRegistration other = (CSourceRegistration) obj; + if (description == null) { + if (other.description != null) + return false; + } else if (!description.equals(other.description)) + return false; + if (endpoint == null) { + if (other.endpoint != null) + return false; + } else if (!endpoint.equals(other.endpoint)) + return false; + if (expires == null) { + if (other.expires != null) + return false; + } else if (!expires.equals(other.expires)) + return false; + if (id == null) { + if (other.id != null) + return false; + } else if (!id.equals(other.id)) + return false; + if (information == null) { + if (other.information != null) + return false; + } else if (!information.equals(other.information)) + return false; + if (location == null) { + if (other.location != null) + return false; + } else if (!location.equals(other.location)) + return false; + if (name == null) { + if (other.name != null) + return false; + } else if (!name.equals(other.name)) + return false; + if (timestamp == null) { + if (other.timestamp != null) + return false; + } else if (!timestamp.equals(other.timestamp)) + return false; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + return true; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Create.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Create.java new file mode 100644 index 0000000000000000000000000000000000000000..8a4baebed40adba0ed28bd60d0a48503ab61de1d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Create.java @@ -0,0 +1,52 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Create extends BaseOperation { + + private Entity data; + private LDContext ldContext; + + + + + + + + public Create(Map customFlags, Entity data, LDContext ldContext) { + super(customFlags); + this.data = data; + this.ldContext = ldContext; + } + + + public Entity getData() { + return data; + } + + + public void setData(Entity data) { + this.data = data; + } + + + public LDContext getLdContext() { + return ldContext; + } + + + public void setLdContext(LDContext ldContext) { + this.ldContext = ldContext; + } + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CreateResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CreateResult.java new file mode 100644 index 0000000000000000000000000000000000000000..772f0953544519db7389eeabe0723c27a8be732c --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/CreateResult.java @@ -0,0 +1,39 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public 
class CreateResult extends BaseResult { + + private Entity data; + + + + public CreateResult(Entity data, String errorMsg, ErrorType errorType, int shortErrorMsg, boolean success) { + super(errorMsg, errorType, shortErrorMsg, success); + this.data = data; + } + + + + public Entity getData() { + return data; + } + + + + public void setData(Entity data) { + this.data = data; + } + + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Delete.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Delete.java new file mode 100644 index 0000000000000000000000000000000000000000..3bb5492e40ac7210156c1b851a51411f31a9dd82 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Delete.java @@ -0,0 +1,25 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Delete extends BaseEntityModificationOperation { + + + + public Delete(Map customFlags, List data, URI id, LDContext ldContext) { + super(customFlags, data, id, ldContext); + + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/DeleteResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/DeleteResult.java new file mode 100644 index 0000000000000000000000000000000000000000..3865c179e1b9dde5871d212a8a8041f94d69e582 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/DeleteResult.java @@ -0,0 +1,40 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class DeleteResult extends BaseResult { + + public DeleteResult(Entity data, String errorMsg, ErrorType errorType, int shortErrorMsg, boolean success) { + super(errorMsg, errorType, shortErrorMsg, success); + this.data = data; + + } + + + + private Entity data; + + + + public Entity getData() { + return data; + } + + + + public void setData(Entity data) { + this.data = data; + } + + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EndPoint.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EndPoint.java new file mode 100644 index 0000000000000000000000000000000000000000..7dfaa47ee785e332d09d804af785489f390f6e74 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EndPoint.java @@ -0,0 +1,49 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class EndPoint { + + private String accept; + private URI uri; + private Map notifierInfo; + + public EndPoint(){ + + } + + public void finalize() throws Throwable { + + } + + public String getAccept() { + return accept; + } + + public void setAccept(String accept) { + this.accept = accept; + } + + public URI getUri() { + return uri; + } + + public void setUri(URI uri) { + this.uri = uri; + } + + public Map
getNotifierInfo() { + return notifierInfo; + } + + public void setNotifierInfo(Map notifierInfo) { + this.notifierInfo = notifierInfo; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Entity.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Entity.java new file mode 100644 index 0000000000000000000000000000000000000000..1ca06229d1678d3889ae3c0f039ebb280c3dbe63 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Entity.java @@ -0,0 +1,282 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:32:45 + */ +public class Entity { + + private URI id; + private GeoProperty location; + private GeoProperty observationSpace; + private GeoProperty operationSpace; + private List properties; + private Object refToAccessControl; + private List relationships; + private List geoProperties; + private String type; + private Long createdAt; + private Long modifiedAt; + private Long observedAt; + private String name; + private List allBaseProperties = new ArrayList(); + private Property createdAtProp = new Property(); + private Property modifiedAtProp = new Property(); + private Property observedAtProp = new Property(); + + public Entity(URI id, String type, List baseProps, Object refToAccessControl) { + this.id = id; + this.type = type; + this.refToAccessControl = refToAccessControl; + this.allBaseProperties = baseProps; + relationships = new ArrayList(); + properties = new ArrayList(); + geoProperties = new ArrayList(); + for (BaseProperty baseProp : baseProps) { + if (baseProp instanceof GeoProperty) { + if (baseProp.id.toString().equals(NGSIConstants.NGSI_LD_LOCATION)) { + this.location = (GeoProperty) baseProp; + } else if (baseProp.id.toString().equals(NGSIConstants.NGSI_LD_OBSERVATION_SPACE)) { + this.observationSpace = (GeoProperty) baseProp; + } else if (baseProp.id.toString().equals(NGSIConstants.NGSI_LD_OPERATION_SPACE)) { + this.operationSpace = (GeoProperty) baseProp; + } else { + this.geoProperties.add((GeoProperty) baseProp); + } + } else if (baseProp instanceof Relationship) { + this.relationships.add((Relationship) baseProp); + } else if (baseProp instanceof Property) { + if (baseProp.id.toString().equals(NGSIConstants.NGSI_LD_CREATED_AT)) { + if (((Property) baseProp).getEntries() != null) { + createdAtProp = (Property) baseProp; + createdAt = (Long) createdAtProp.getEntries().values().iterator().next().getValue(); + } + + } else if (baseProp.id.toString().equals(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + if (((Property) baseProp).getEntries() != null) { + modifiedAtProp = (Property) baseProp; + modifiedAt = (Long) modifiedAtProp.getEntries().values().iterator().next().getValue(); + } + + } else if (baseProp.id.toString().equals(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + if (((Property) baseProp).getEntries() != null) { + observedAtProp = (Property) baseProp; + observedAt = (Long) observedAtProp.getEntries().values().iterator().next().getValue(); + } + } else { + this.properties.add((Property) baseProp); + } + + } + } + } + + public Entity(URI id, GeoProperty location, GeoProperty observationSpace, GeoProperty operationSpace, + List properties, Object refToAccessControl, List relationships, 
String type, + List geoProperties) { + super(); + this.id = id; + this.location = location; + this.observationSpace = observationSpace; + this.operationSpace = operationSpace; + this.properties = properties; + this.refToAccessControl = refToAccessControl; + this.relationships = relationships; + this.geoProperties = geoProperties; + this.type = type; + try { + createdAtProp.setId(new URI(NGSIConstants.NGSI_LD_CREATED_AT)); + modifiedAtProp.setId(new URI(NGSIConstants.NGSI_LD_MODIFIED_AT)); + observedAtProp.setId(new URI(NGSIConstants.NGSI_LD_OBSERVED_AT)); + createdAtProp.setEntries(null); + modifiedAtProp.setEntries(null); + observedAtProp.setEntries(null); + allBaseProperties.add(createdAtProp); + allBaseProperties.add(modifiedAtProp); + allBaseProperties.add(observedAtProp); + } catch (URISyntaxException e) { + e.printStackTrace(); + } + if (properties != null) { + allBaseProperties.addAll(properties); + } + if (relationships != null) { + allBaseProperties.addAll(relationships); + } + if (geoProperties != null) { + allBaseProperties.addAll(geoProperties); + } + if (location != null) { + allBaseProperties.add(location); + } + if (observationSpace != null) { + allBaseProperties.add(observationSpace); + } + if (operationSpace != null) { + allBaseProperties.add(operationSpace); + } + + } + + public Long getObservedAt() { + return observedAt; + } + + public void setObservedAt(Long observedAt) { + this.observedAt = observedAt; + observedAtProp.setSingleEntry(new PropertyEntry("observedAt", observedAt)); + } + + + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public URI getId() { + return id; + } + + public void setId(URI id) { + this.id = id; + } + + public GeoProperty getLocation() { + return location; + } + + public void setLocation(GeoProperty location) { + if (this.location != null) { + allBaseProperties.remove(this.location); + } + allBaseProperties.add(location); + this.location = location; + } + + public GeoProperty getObservationSpace() { + return observationSpace; + } + + public void setObservationSpace(GeoProperty observationSpace) { + if (this.observationSpace != null) { + allBaseProperties.remove(this.observationSpace); + } + allBaseProperties.add(observationSpace); + this.observationSpace = observationSpace; + } + + public GeoProperty getOperationSpace() { + return operationSpace; + } + + public void setOperationSpace(GeoProperty operationSpace) { + if (this.operationSpace != null) { + allBaseProperties.remove(this.operationSpace); + } + allBaseProperties.add(operationSpace); + this.operationSpace = operationSpace; + } + + public List getProperties() { + return properties; + } + + public void setProperties(List properties) { + if (this.properties != null) { + allBaseProperties.removeAll(this.properties); + } + allBaseProperties.addAll(properties); + + this.properties = properties; + } + + public Object getRefToAccessControl() { + return refToAccessControl; + } + + public void setRefToAccessControl(Object refToAccessControl) { + this.refToAccessControl = refToAccessControl; + } + + public List getRelationships() { + return relationships; + } + + public void setRelationships(List relationships) { + if (this.relationships != null) { + allBaseProperties.removeAll(this.relationships); + } + if (relationships != null) { + allBaseProperties.addAll(relationships); + } + this.relationships = relationships; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + 
+ public void finalize() throws Throwable { + + } + + public Long getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(Long createdAt) { + createdAtProp.setSingleEntry(new PropertyEntry("createdAt", createdAt)); + this.createdAt = createdAt; + } + + public Long getModifiedAt() { + return modifiedAt; + } + + public void setModifiedAt(Long modifiedAt) { + modifiedAtProp.setSingleEntry(new PropertyEntry("modifiedAt", modifiedAt)); + this.modifiedAt = modifiedAt; + } + + public List getAllBaseProperties() { + return allBaseProperties; + } + + public List getGeoProperties() { + return geoProperties; + } + + public void setGeoProperties(List geoProperties) { + if (this.geoProperties != null) { + allBaseProperties.removeAll(this.geoProperties); + } + if (geoProperties != null) { + allBaseProperties.addAll(geoProperties); + } + this.geoProperties = geoProperties; + } + + @Override + public String toString() { + return "Entity [id=" + id + ", location=" + location + ", observationSpace=" + observationSpace + + ", operationSpace=" + operationSpace + ", properties=" + properties + ", refToAccessControl=" + + refToAccessControl + ", relationships=" + relationships + ", type=" + type + "]"; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EntityDetails.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EntityDetails.java new file mode 100644 index 0000000000000000000000000000000000000000..4d782b63f6fb4facedec0d3bd37d280d3cda91db --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EntityDetails.java @@ -0,0 +1,81 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +public class EntityDetails { + + private String key; + private int partition; + private long offset; + + public EntityDetails() {} + + + public EntityDetails(String key, int partition, long offset) { + super(); + this.key = key; + this.partition = partition; + this.offset = offset; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public int getPartition() { + return partition; + } + + public void setPartition(int partition) { + this.partition = partition; + } + + public long getOffset() { + return offset; + } + + public void setOffset(long offset) { + this.offset = offset; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((key == null) ? 
0 : key.hashCode()); + result = prime * result + (int) (offset ^ (offset >>> 32)); + result = prime * result + partition; + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + EntityDetails other = (EntityDetails) obj; + if (key == null) { + if (other.key != null) + return false; + } else if (!key.equals(other.key)) + return false; + if (offset != other.offset) + return false; + if (partition != other.partition) + return false; + return true; + } + + + @Override + public String toString() { + return "EntityDetails [key=" + key + ", partition=" + partition + ", offset=" + offset + "]"; + } + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EntityInfo.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EntityInfo.java new file mode 100644 index 0000000000000000000000000000000000000000..16d32949218889585f13416fde2fdd9f76d54f03 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/EntityInfo.java @@ -0,0 +1,74 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:34:28 + */ +public class EntityInfo { + + private URI id; + private String idPattern; + private String type; + + public EntityInfo() { + + } + + public EntityInfo(URI id, String idPattern, String type) { + super(); + this.id = id; + this.idPattern = idPattern; + this.type = type; + } + + + + public URI getId() { + return id; + } + + + + public void setId(URI id) { + this.id = id; + } + + + + public String getIdPattern() { + return idPattern; + } + + + + public void setIdPattern(String idPattern) { + this.idPattern = idPattern; + } + + + + public String getType() { + return type; + } + + + + public void setType(String type) { + this.type = type; + } + + + + public void finalize() throws Throwable { + + } + + @Override + public String toString() { + return "EntityInfo [id=" + id + ", idPattern=" + idPattern + ", type=" + type + "]"; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoProperty.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoProperty.java new file mode 100644 index 0000000000000000000000000000000000000000..12dedc4dcd3d6be1b3e934326176b22fdf33fc1c --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoProperty.java @@ -0,0 +1,69 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.HashMap; + +import com.github.filosganga.geogson.model.Geometry; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class GeoProperty extends BaseProperty { + + + HashMap entries = new HashMap(); + + public GeoProperty(){ +// this.type = "GeoProperty"; + } + + public void finalize() throws Throwable { + + } + + + + + public HashMap getEntries() { + return entries; + } + + public void setEntries(HashMap entries) { + this.entries = entries; + } + + + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((entries == null) ?
0 : entries.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + if (getClass() != obj.getClass()) + return false; + GeoProperty other = (GeoProperty) obj; + if (entries == null) { + if (other.entries != null) + return false; + } else if (!entries.equals(other.entries)) + return false; + return true; + } + + @Override + public boolean isMultiValue() { + return entries.size() != 1; + } + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoPropertyEntry.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoPropertyEntry.java new file mode 100644 index 0000000000000000000000000000000000000000..1c016ee52ec647931f3b3394d48e695ce66f374f --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoPropertyEntry.java @@ -0,0 +1,35 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import com.github.filosganga.geogson.model.Geometry; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; + +public class GeoPropertyEntry extends BaseEntry { + public GeoPropertyEntry(String dataSetId, String value, Geometry geoValue) { + super(dataSetId); + this.value = value; + this.geoValue = geoValue; + this.type = NGSIConstants.NGSI_LD_GEOPROPERTY; + } + + private String value; + + private Geometry geoValue; + + public String getValue() { + return value; + } + + public void setValue(String string) { + this.value = string; + } + + public Geometry getGeoValue() { + return geoValue; + } + + public void setGeoValue(Geometry geoValue) { + this.geoValue = geoValue; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoRelation.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoRelation.java new file mode 100644 index 0000000000000000000000000000000000000000..908cbabd4bc501facf11d29db4b655774f847d05 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoRelation.java @@ -0,0 +1,86 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class GeoRelation { + + private Object maxDistance; + private Double maxDistanceAsDouble; + private Object minDistance; + private Double minDistanceAsDouble; + private String relation; + + public GeoRelation() { + + } + + public void finalize() throws Throwable { + + } + + public Object getMaxDistance() { + return maxDistance; + } + + + public Double getMaxDistanceAsDouble() { + return maxDistanceAsDouble; + } + + public Double getMinDistanceAsDouble() { + return minDistanceAsDouble; + } + + public void setMaxDistance(Object maxDistance) { + this.maxDistance = maxDistance; + if (maxDistance instanceof Integer) { + maxDistanceAsDouble = ((Integer) maxDistance).doubleValue(); + } else { + maxDistanceAsDouble = (Double) maxDistance; + } + } + + public Object getMinDistance() { + return minDistance; + } + + public void setMinDistance(Object minDistance) { + this.minDistance = minDistance; + if (minDistance instanceof Integer) { + minDistanceAsDouble = ((Integer) minDistance).doubleValue(); + } else { + minDistanceAsDouble = (Double) minDistance; + } + } + + public String getRelation() { + return relation; + } + + public void setRelation(String relation) { + this.relation = relation; + } + + public String 
getABNFString() { + StringBuilder result = new StringBuilder(); + result.append(relation); + if (maxDistance != null) { + result.append(";"); + result.append("maxDistance"); + result.append("=="); + result.append(maxDistance); + } + if (minDistance != null) { + result.append(";"); + result.append("minDistance"); + result.append("=="); + result.append(minDistance); + } + return result.toString(); + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoValue.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoValue.java new file mode 100644 index 0000000000000000000000000000000000000000..e7196b06ed8868e7d5636c4ab00b1126d2f94690 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoValue.java @@ -0,0 +1,53 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.List; + +public class GeoValue { + + String type; + List coordinates; + public String getType() { + return type; + } + public void setType(String type) { + this.type = type; + } + public List getCoordinates() { + return coordinates; + } + public void setCoordinates(List coordinates) { + this.coordinates = coordinates; + } + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((coordinates == null) ? 0 : coordinates.hashCode()); + result = prime * result + ((type == null) ? 0 : type.hashCode()); + return result; + } + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + GeoValue other = (GeoValue) obj; + if (coordinates == null) { + if (other.coordinates != null) + return false; + } else if (!coordinates.equals(other.coordinates)) + return false; + if (type == null) { + if (other.type != null) + return false; + } else if (!type.equals(other.type)) + return false; + return true; + } + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoqueryRel.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoqueryRel.java new file mode 100644 index 0000000000000000000000000000000000000000..6ea9ebb5da685104d31aeff9b2d5649b89c0f3f8 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/GeoqueryRel.java @@ -0,0 +1,49 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; + +public class GeoqueryRel { + private String georelOp = null; + private String distanceType = null; + private String distanceValue = null; + + + public GeoqueryRel() { + super(); + } + + + public GeoqueryRel(GeoRelation georel) { + super(); + this.georelOp = georel.getRelation(); + if(georel.getMaxDistance() != null && georel.getMaxDistanceAsDouble() > 0) { + this.distanceType = NGSIConstants.GEO_REL_MAX_DISTANCE; + this.distanceValue = "" + georel.getMaxDistance(); + }else if(georel.getMinDistance() != null && georel.getMinDistanceAsDouble() > 0) { + this.distanceType = NGSIConstants.GEO_REL_MIN_DISTANCE; + this.distanceValue = "" + georel.getMinDistance(); + } + } + + + public String getGeorelOp() { + return georelOp; + } + public void setGeorelOp(String georelOp) { + this.georelOp = georelOp; + } + public String getDistanceType() { + return distanceType; + } + public void setDistanceType(String distanceType) { + this.distanceType = distanceType; + } 
+ public String getDistanceValue() { + return distanceValue; + } + public void setDistanceValue(String distanceValue) { + this.distanceValue = distanceValue; + } + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Information.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Information.java new file mode 100644 index 0000000000000000000000000000000000000000..cb93ce19a644e97fa732a38472d4dd0dea7cc429 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Information.java @@ -0,0 +1,86 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class Information { + + private List entities; + private Set properties; + private Set relationships; + + public Information() { + this.entities=new ArrayList(); + this.properties=new HashSet(); + this.relationships=new HashSet(); + } + public List getEntities() { + return entities; + } + + public void setEntities(List entities) { + this.entities = entities; + } + + public Set getProperties() { + return properties; + } + + public void setProperties(Set properties) { + this.properties = properties; + } + + public Set getRelationships() { + return relationships; + } + + public void setRelationships(Set relationships) { + this.relationships = relationships; + } + + @Override + public String toString() { + return "Information [entities=" + entities + ", properties=" + properties + ", relationships=" + relationships + + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((entities == null) ? 0 : entities.hashCode()); + result = prime * result + ((properties == null) ? 0 : properties.hashCode()); + result = prime * result + ((relationships == null) ? 
0 : relationships.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + Information other = (Information) obj; + if (entities == null) { + if (other.entities != null) + return false; + } else if (!entities.equals(other.entities)) + return false; + if (properties == null) { + if (other.properties != null) + return false; + } else if (!properties.equals(other.properties)) + return false; + if (relationships == null) { + if (other.relationships != null) + return false; + } else if (!relationships.equals(other.relationships)) + return false; + return true; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDContext.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDContext.java new file mode 100644 index 0000000000000000000000000000000000000000..e501a303f274edf8af1fc41671174cd0e6607fd6 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDContext.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class LDContext { + + public LDContext(){ + + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDGeoQuery.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDGeoQuery.java new file mode 100644 index 0000000000000000000000000000000000000000..859c1ecb6476ae191813f9cfebbc559fe9e50513 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDGeoQuery.java @@ -0,0 +1,88 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.List; + +import eu.neclab.ngsildbroker.commons.enums.Geometry; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class LDGeoQuery { + + private List coordinates; + private Geometry geometry; + private String geoProperty = "location"; + private GeoRelation geoRelation; +// private boolean nextAnd = true; +// private LDGeoQuery next; + + public LDGeoQuery(){ + + } + + + +// public boolean isNextAnd() { +// return nextAnd; +// } +// +// +// +// public void setNextAnd(boolean nextAnd) { +// this.nextAnd = nextAnd; +// } +// +// +// +// public LDGeoQuery getNext() { +// return next; +// } +// +// +// +// public void setNext(LDGeoQuery next) { +// this.next = next; +// } + + + + public void finalize() throws Throwable { + + } + + public List getCoordinates() { + return coordinates; + } + + public void setCoordinates(List coordinates) { + this.coordinates = coordinates; + } + + public Geometry getGeometry() { + return geometry; + } + + public void setGeometry(Geometry geometry) { + this.geometry = geometry; + } + + public String getGeoProperty() { + return geoProperty; + } + + public void setGeoProperty(String geoProperty) { + this.geoProperty = geoProperty; + } + + public GeoRelation getGeoRelation() { + return geoRelation; + } + + public void setGeoRelation(GeoRelation geoRelation) { + this.geoRelation = geoRelation; + } + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDQuery.java 
b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDQuery.java new file mode 100644 index 0000000000000000000000000000000000000000..8b018844d35fbc18232d7976d588b642c09dc87c --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDQuery.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class LDQuery { + + public LDQuery(){ + + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDTemporalQuery.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDTemporalQuery.java new file mode 100644 index 0000000000000000000000000000000000000000..22328c159385a1c0b05e5424850ee7f667802d8a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/LDTemporalQuery.java @@ -0,0 +1,60 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.Date; + +import eu.neclab.ngsildbroker.commons.enums.TemporalRelation; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class LDTemporalQuery { + + private Date endTime; + private TemporalRelation temprel; + private Date time; + private String timeProperty = "observedAt"; + + public LDTemporalQuery(){ + + } + + public void finalize() throws Throwable { + + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public TemporalRelation getTemprel() { + return temprel; + } + + public void setTemprel(TemporalRelation temprel) { + this.temprel = temprel; + } + + public Date getTime() { + return time; + } + + public void setTime(Date time) { + this.time = time; + } + + public String getTimeProperty() { + return timeProperty; + } + + public void setTimeProperty(String timeProperty) { + this.timeProperty = timeProperty; + } + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Location.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Location.java new file mode 100644 index 0000000000000000000000000000000000000000..66892e9318f8cadf14a8121661dab29ef3dd757a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Location.java @@ -0,0 +1,39 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Location { + + private double latitude; + private double longitude; + + public Location(){ + + } + + public void finalize() throws Throwable { + + } + + public double getLatitude() { + return latitude; + } + + public void setLatitude(double latitude) { + this.latitude = latitude; + } + + public double getLongitude() { + return longitude; + } + + public void setLongitude(double longitude) { + this.longitude = longitude; + } + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Notification.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Notification.java new file mode 100644 index 0000000000000000000000000000000000000000..3ffad2b9532e3892baf9ac612c76d46c33ebba1c --- /dev/null +++ 
b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Notification.java @@ -0,0 +1,99 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.Date; +import java.util.List; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Notification extends QueryResult { + + + public Notification(URI id, Long notifiedAt, URI subscriptionId, List data) { + super(null, null, null, -1, true); + this.id = id; + this.notifiedAt = notifiedAt; + this.subscriptionId = subscriptionId; + this.data = data; + } + + + + public Notification(URI id, Long notifiedAt, URI subscriptionId, List data, String errorMsg, ErrorType errorType, int shortErrorMsg, boolean success) { + super(null, errorMsg, errorType, shortErrorMsg, success); + this.id = id; + this.notifiedAt = notifiedAt; + this.subscriptionId = subscriptionId; + this.data = data; + } + + + + private URI id; + private Long notifiedAt; + private URI subscriptionId; + private List data; + private final String type = "Notification"; + + + + public URI getId() { + return id; + } + + + + public void setId(URI id) { + this.id = id; + } + + + + public Long getNotifiedAt() { + return notifiedAt; + } + + + + public void setNotifiedAt(Long notifiedAt) { + this.notifiedAt = notifiedAt; + } + + + + public URI getSubscriptionId() { + return subscriptionId; + } + + + + public void setSubscriptionId(URI subscriptionId) { + this.subscriptionId = subscriptionId; + } + + + + public String getType() { + return type; + } + + + public List getData() { + return data; + } + + public void setData(List data) { + this.data = data; + } + + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/NotificationParam.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/NotificationParam.java new file mode 100644 index 0000000000000000000000000000000000000000..1791964d7927fa324af654392ed2bf484fe61ac5 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/NotificationParam.java @@ -0,0 +1,120 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.Date; +import java.util.List; + +import eu.neclab.ngsildbroker.commons.enums.Format; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class NotificationParam { + + private List attributeNames; + private EndPoint endPoint; + private Format format; + + private int timesSent = 0; + private Date lastNotification; + private Date lastSuccessfulNotification; + private Date lastFailedNotification; + + + + + public NotificationParam(){ + + } + + + + public int getTimesSent() { + return timesSent; + } + + + + + + + public Date getLastNotification() { + return lastNotification; + } + + public void setTimesSent(int timeSent) { + this.timesSent = timeSent; + } + + public void setLastNotification(Date lastNotification) { + this.timesSent++; + this.lastNotification = lastNotification; + } + + + + public Date getLastSuccessfulNotification() { + return lastSuccessfulNotification; + } + + + + public void setLastSuccessfulNotification(Date lastSuccessfulNotification) { + this.lastSuccessfulNotification = lastSuccessfulNotification; + } + + + + public Date getLastFailedNotification() { + return lastFailedNotification; + } + + + + public void 
setLastFailedNotification(Date lastFailedNotification) { + this.lastFailedNotification = lastFailedNotification; + } + + + + public void finalize() throws Throwable { + + } + + public List getAttributeNames() { + return attributeNames; + } + + public void setAttributeNames(List attributeNames) { + this.attributeNames = attributeNames; + } + + public EndPoint getEndPoint() { + return endPoint; + } + + public void setEndPoint(EndPoint endPoint) { + this.endPoint = endPoint; + } + + public Format getFormat() { + return format; + } + + public void setFormat(Format format) { + this.format = format; + } + + + + @Override + public String toString() { + return "NotificationParam [attributeNames=" + attributeNames + ", endPoint=" + endPoint + ", format=" + format + + ", timesSent=" + timesSent + ", lastNotification=" + lastNotification + + ", lastSuccessfulNotification=" + lastSuccessfulNotification + ", lastFailedNotification=" + + lastFailedNotification + "]"; + } + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Property.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Property.java new file mode 100644 index 0000000000000000000000000000000000000000..1e2ee640f63c432b08f9f9b1e493c322327af054 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Property.java @@ -0,0 +1,76 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Property extends BaseProperty { + + + + + private HashMap dataSetId2value; + public Property(){ + type = "Property"; + } + + public void finalize() throws Throwable { + + } + + public void setSingleEntry(PropertyEntry value) { + HashMap temp = new HashMap(); + temp.put(value.getDataSetId(), value); + setEntries(temp); + } + + public HashMap getEntries() { + return dataSetId2value; + } + + public void setEntries(HashMap value) { + this.dataSetId2value = value; + } + + + + + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((dataSetId2value == null) ? 
0 : dataSetId2value.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + if (getClass() != obj.getClass()) + return false; + Property other = (Property) obj; + if (dataSetId2value == null) { + if (other.dataSetId2value != null) + return false; + } else if (!dataSetId2value.equals(other.dataSetId2value)) + return false; + return true; + } + + @Override + public boolean isMultiValue() { + return dataSetId2value.size() != 1; + } + + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/PropertyEntry.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/PropertyEntry.java new file mode 100644 index 0000000000000000000000000000000000000000..11a8d4b72c82b4003ebe2c697ac7137685259b79 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/PropertyEntry.java @@ -0,0 +1,36 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.UUID; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; + +public class PropertyEntry extends BaseEntry{ + + private Object value; + private String unitCode; + + + public PropertyEntry(String dataSetId, Object value) { + super(dataSetId); + this.value = value; + this.type = NGSIConstants.NGSI_LD_PROPERTY; + } + + public Object getValue() { + return value; + } + public void setValue(Object value) { + this.value = value; + } + public String getUnitCode() { + return unitCode; + } + + public void setUnitCode(String unitCode) { + this.unitCode = unitCode; + } + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Query.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Query.java new file mode 100644 index 0000000000000000000000000000000000000000..ad27163fa041419d9e3ebd6fa3510569260f4b09 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Query.java @@ -0,0 +1,141 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Query extends BaseOperation { + + + public Query() { + super(); + } + + public Query(Map customFlags, List attributeNames, List entities, + String ldContext, LDGeoQuery ldGeoQuery, String ldQuery, LDTemporalQuery ldTempQuery, + List requestorList) { + super(customFlags); + this.attributeNames = attributeNames; + if(this.attributeNames == null) { + this.attributeNames = new ArrayList(); + } + + this.entities = entities; + this.ldContext = ldContext; + this.ldGeoQuery = ldGeoQuery; + this.ldQuery = ldQuery; + this.ldTempQuery = ldTempQuery; + this.requestorList = requestorList; + } + + + protected List attributeNames; + protected List entities = new ArrayList(); + protected String ldContext; + protected LDGeoQuery ldGeoQuery; + protected String ldQuery; + protected LDTemporalQuery ldTempQuery; + protected List requestorList; + + + + public List getAttributeNames() { + return attributeNames; + } + + + + public void setAttributeNames(List attributeNames) { + this.attributeNames = attributeNames; + } + + + + public List getEntities() { + return entities; + } + + + + public void setEntities(List entities) { + this.entities = entities; + } + + + + public String getLdContext() 
{ + return ldContext; + } + + + + public void setLdContext(String ldContext) { + this.ldContext = ldContext; + } + + + + public LDGeoQuery getLdGeoQuery() { + return ldGeoQuery; + } + + + + public void setLdGeoQuery(LDGeoQuery ldGeoQuery) { + this.ldGeoQuery = ldGeoQuery; + } + + + + public String getLdQuery() { + return ldQuery; + } + + + + public void setLdQuery(String ldQuery) { + this.ldQuery = ldQuery; + } + + + + public LDTemporalQuery getLdTempQuery() { + return ldTempQuery; + } + + + + public void setLdTempQuery(LDTemporalQuery ldTempQuery) { + this.ldTempQuery = ldTempQuery; + } + + + + public List getRequestorList() { + return requestorList; + } + + + + public void setRequestorList(List requestorList) { + this.requestorList = requestorList; + } + + public void addEntityInfo(EntityInfo entity) { + this.entities.add(entity); + } + public void removeEntityInfo(EntityInfo entity) { + this.entities.remove(entity); + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryParams.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryParams.java new file mode 100644 index 0000000000000000000000000000000000000000..f59e2b8055b7376bd51536699d69839012f8b736 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryParams.java @@ -0,0 +1,314 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import com.google.gson.annotations.Expose; +import com.google.gson.annotations.SerializedName; +import org.apache.commons.lang.builder.ToStringBuilder; + +public class QueryParams { + + @SerializedName("id") + @Expose + private String id; + @SerializedName("type") + @Expose + private String type; + @SerializedName("attrs") + @Expose + private String attrs; + @SerializedName("instanceId") + @Expose + private String instanceId; + @SerializedName("idPattern") + @Expose + private String idPattern; + @SerializedName("q") + @Expose + private String q; + @SerializedName("georel") + @Expose + private GeoqueryRel georel; + @SerializedName("geometry") + @Expose + private String geometry; + @SerializedName("coordinates") + @Expose + private String coordinates; + @SerializedName("geoproperty") + @Expose + private String geoproperty; + @SerializedName("timerel") + @Expose + private String timerel; + @SerializedName("time") + @Expose + private String time; + @SerializedName("endTime") + @Expose + private String endTime; + @SerializedName("timeproperty") + @Expose + private String timeproperty; + @SerializedName("includeSysAttrs") + @Expose + private boolean includeSysAttrs; + @SerializedName("keyValues") + @Expose + private boolean keyValues; + @SerializedName("temporalValues") + @Expose + private boolean temporalValues; + + @SerializedName("limit") + @Expose + private int limit = -1; + + @SerializedName("offSet") + @Expose + private int offSet = -1; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public QueryParams withId(String id) { + this.id = id; + return this; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public QueryParams withType(String type) { + this.type = type; + return this; + } + + public String getAttrs() { + return attrs; + } + + public void setAttrs(String attrs) { + this.attrs = attrs; + } + + public QueryParams withAttrs(String attrs) { + this.attrs = attrs; + 
return this; + } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + + public QueryParams withInstanceId(String instanceId) { + this.instanceId = instanceId; + return this; + } + + public String getIdPattern() { + return idPattern; + } + + public void setIdPattern(String idPattern) { + this.idPattern = idPattern; + } + + public QueryParams withIdPattern(String idPattern) { + this.idPattern = idPattern; + return this; + } + + public String getQ() { + return q; + } + + public void setQ(String q) { + this.q = q; + } + + public QueryParams withQ(String q) { + this.q = q; + return this; + } + + public GeoqueryRel getGeorel() { + return georel; + } + + public void setGeorel(GeoqueryRel georel) { + this.georel = georel; + } + + public QueryParams withGeorel(GeoqueryRel georel) { + this.georel = georel; + return this; + } + + public String getGeometry() { + return geometry; + } + + public void setGeometry(String geometry) { + this.geometry = geometry; + } + + public QueryParams withGeometry(String geometry) { + this.geometry = geometry; + return this; + } + + public String getCoordinates() { + return coordinates; + } + + public void setCoordinates(String coordinates) { + this.coordinates = coordinates; + } + + public QueryParams withCoordinates(String coordinates) { + this.coordinates = coordinates; + return this; + } + + public String getGeoproperty() { + return geoproperty; + } + + public void setGeoproperty(String geoproperty) { + this.geoproperty = geoproperty; + } + + public QueryParams withGeoproperty(String geoproperty) { + this.geoproperty = geoproperty; + return this; + } + + public String getTimerel() { + return timerel; + } + + public void setTimerel(String timerel) { + this.timerel = timerel; + } + + public QueryParams withTimerel(String timerel) { + this.timerel = timerel; + return this; + } + + public String getTime() { + return time; + } + + public void setTime(String time) { + this.time = time; + } + + public QueryParams withTime(String time) { + this.time = time; + return this; + } + + public String getEndTime() { + return endTime; + } + + public void setEndTime(String endTime) { + this.endTime = endTime; + } + + public QueryParams withEndTime(String endTime) { + this.endTime = endTime; + return this; + } + + public String getTimeproperty() { + return timeproperty; + } + + public void setTimeproperty(String timeproperty) { + this.timeproperty = timeproperty; + } + + public QueryParams withTimeproperty(String timeproperty) { + this.timeproperty = timeproperty; + return this; + } + + public boolean getIncludeSysAttrs() { + return includeSysAttrs; + } + + public void setIncludeSysAttrs(boolean includeSysAttrs) { + this.includeSysAttrs = includeSysAttrs; + } + + public QueryParams withIncludeSysAttrs(boolean includeSysAttrs) { + this.includeSysAttrs = includeSysAttrs; + return this; + } + + public boolean getKeyValues() { + return keyValues; + } + + public void setKeyValues(boolean keyValues) { + this.keyValues = keyValues; + } + + public QueryParams withKeyValues(boolean keyValues) { + this.keyValues = keyValues; + return this; + } + + public boolean getTemporalValues() { + return temporalValues; + } + + public void setTemporalValues(boolean temporalValues) { + this.temporalValues = temporalValues; + } + + public QueryParams withTemporalValues(boolean temporalValues) { + this.temporalValues = temporalValues; + return this; + } + + public int getLimit() { + return limit; + } + + public 
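/*
 * The with* setters return the instance itself, so a QueryParams object can be
 * assembled fluently. A minimal usage sketch (the expanded type URI and the q
 * expression are hypothetical values; limit and offSet only have plain setters):
 *
 *   QueryParams params = new QueryParams()
 *       .withType("https://uri.etsi.org/ngsi-ld/default-context/Vehicle")
 *       .withAttrs("brandName,speed")
 *       .withQ("brandName==\"Mercedes\"")
 *       .withKeyValues(true);
 *   params.setLimit(20);
 *   params.setOffSet(0);
 */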
void setLimit(int limit) { + this.limit = limit; + } + + public int getOffSet() { + return offSet; + } + + public void setOffSet(int offSet) { + this.offSet = offSet; + } + + @Override + public String toString() { + return new ToStringBuilder(this).append("id", id).append("type", type).append("attrs", attrs) + .append("idPattern", idPattern).append("q", q).append("georel", georel).append("geometry", geometry) + .append("coordinates", coordinates).append("geoproperty", geoproperty).append("timerel", timerel) + .append("time", time).append("endTime", endTime).append("timeproperty", timeproperty).toString(); + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryParams.schema.json b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryParams.schema.json new file mode 100644 index 0000000000000000000000000000000000000000..6fcbefb5d8af17ec97298fe7dfbeadca665de809 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryParams.schema.json @@ -0,0 +1,45 @@ +{ + "type":"object", + "properties": { + "id": { + "type": "string" + }, + "type": { + "type": "string" + }, + "attrs": { + "type": "string" + }, + "idPattern": { + "type": "string" + }, + "q": { + "type": "string" + }, + "georel": { + "type": "object", + "javaType" : "GeoqueryRel", + }, + "geometry": { + "type": "string" + }, + "coordinates": { + "type": "string" + }, + "geoproperty": { + "type": "string" + }, + "timerel": { + "type": "string" + }, + "time": { + "type": "string" + }, + "endTime": { + "type": "string" + }, + "timeproperty": { + "type": "string" + } + } +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryResult.java new file mode 100644 index 0000000000000000000000000000000000000000..84d310e3273d8902f2a6e791cf39fd8b77babc7d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryResult.java @@ -0,0 +1,101 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.List; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class QueryResult extends BaseResult { + + public QueryResult(List dataString, String errorMsg, ErrorType errorType, int shortErrorMsg, boolean success) { + super(errorMsg, errorType, shortErrorMsg, success); + this.dataString = dataString; + } + + + private String qToken; + private Integer limit; + private Integer offset; + private Integer resultsLeftAfter; + private Integer resultsLeftBefore; + + private List dataString; + + + + public Integer getResultsLeftBefore() { + return resultsLeftBefore; + } + + + + public void setResultsLeftBefore(Integer resultsLeftBefore) { + this.resultsLeftBefore = resultsLeftBefore; + } + + + + public Integer getResultsLeftAfter() { + return resultsLeftAfter; + } + + + + public void setResultsLeftAfter(Integer resultsLeft) { + this.resultsLeftAfter = resultsLeft; + } + + + + public String getqToken() { + return qToken; + } + + + + public void setqToken(String qToken) { + this.qToken = qToken; + } + + + + public Integer getLimit() { + return limit; + } + + + + public void setLimit(Integer limit) { + this.limit = limit; + } + + + + public Integer getOffset() { + return offset; + } + + + + public void 
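/*
 * QueryResult carries the serialized entities together with paging state
 * (qToken, limit, offset, resultsLeftAfter/Before). A hedged sketch of how a
 * successful page might be assembled, assuming a success result passes null
 * error fields and a placeholder short error code to the BaseResult constructor:
 *
 *   QueryResult page = new QueryResult(entityJsonList, null, null, -1, true);
 *   page.setLimit(20);
 *   page.setOffset(40);
 *   page.setResultsLeftAfter(15);
 *   page.setqToken("someToken");
 */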
setOffset(Integer offset) { + this.offset = offset; + } + + + public List getDataString() { + return dataString; + } + + public void setDataString(List dataString) { + this.dataString = dataString; + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryTerm.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryTerm.java new file mode 100644 index 0000000000000000000000000000000000000000..70f221c99ad24b5c4495b2891d844e6b82be8577 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/QueryTerm.java @@ -0,0 +1,1243 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.BadRequestException; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +public class QueryTerm { + + private static final String RANGE = ".+\\.\\..+"; + private static final String LIST = ".+(,.+)+"; + private static final String URI = "\\w+:(\\/?\\/?)[^\\s^;]+"; + private static final String DATETIME = "\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(,\\d\\d\\d\\d\\d\\d)?Z"; + private static final String DATE = "\\d\\d\\d\\d-\\d\\d-\\d\\d"; + private static final String TIME = "\\d\\d:\\d\\d:\\d\\d(,\\d\\d\\d\\d\\d\\d)?Z"; + + private static final List TIME_PROPS = Arrays.asList(NGSIConstants.NGSI_LD_OBSERVED_AT, + NGSIConstants.NGSI_LD_CREATED_AT, NGSIConstants.NGSI_LD_MODIFIED_AT); + private List linkHeaders; + private QueryTerm next = null; + private boolean nextAnd = true; + private QueryTerm firstChild = null; + private QueryTerm parent = null; + private String attribute = ""; + private String operator = ""; + private String operant = ""; + + ParamsResolver paramsResolver; + + public QueryTerm(List linkHeaders, ParamsResolver paramsResolver) { + this.linkHeaders = linkHeaders; + this.paramsResolver = paramsResolver; + } + + public boolean hasNext() { + return next != null; + } + + public boolean calculate(BaseProperty property) throws ResponseException { + ArrayList temp = new ArrayList(); + temp.add(property); + return calculate(temp); + + } + + /** + * + * @param properties + * @return + * @throws ResponseException + */ + public boolean calculate(List properties) throws ResponseException { + boolean result = false; + if (firstChild == null) { + result = calculate(properties, attribute, operator, operant); + } else { + result = firstChild.calculate(properties); + } + if (hasNext()) { + if (nextAnd) { + result = result && next.calculate(properties); + } else { + result = result || next.calculate(properties); + } + } + + return result; + } + + private boolean calculate(List properties, String attribute, String operator, String operant) + throws ResponseException { + + if (!attribute.matches(URI) && attribute.contains(".")) { + String[] splittedAttrib = attribute.split("\\."); + ArrayList newProps = new ArrayList(); + String 
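/*
 * For a dotted attribute path the filter is evaluated recursively: the first
 * path element is expanded through the ParamsResolver, the matching top-level
 * properties (plus their sub-properties and relationships) are collected, and
 * calculate() is invoked again with the remaining path. A hedged walk-through
 * for a hypothetical filter address.city=="Berlin":
 *
 *   1. attribute = "address.city" -> splittedAttrib = ["address", "city"]
 *   2. "address" is expanded to its full URI and matched against the entity's
 *      properties; the sub-attributes of every match are added to newProps
 *   3. calculate(newProps, "city", "==", "\"Berlin\"") then runs the actual
 *      comparison on the nested property
 */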
expanded = expandAttributeName(splittedAttrib[0]); + if (expanded == null) { + return false; + } + List potentialMatches = getMatchingProperties(properties, expanded); + if (potentialMatches == null) { + return false; + } + for (BaseProperty potentialMatch : potentialMatches) { + newProps.addAll(getSubAttributes(potentialMatch)); + } + newProps.addAll(potentialMatches); + + String newAttrib; + if (splittedAttrib.length > 2) { + newAttrib = String.join(".", Arrays.copyOfRange(splittedAttrib, 1, splittedAttrib.length - 1)); + } else { + newAttrib = splittedAttrib[1]; + } + return calculate(newProps, newAttrib, operator, operant); + } else { + String[] compound = null; + if (attribute.contains("[")) { + compound = attribute.split("\\["); + attribute = compound[0]; + compound = Arrays.copyOfRange(compound, 1, compound.length); + } + String myAttribName = expandAttributeName(attribute); + if (myAttribName == null) { + return false; + } + boolean finalReturnValue = false; + int index = NGSIConstants.SPECIAL_PROPERTIES.indexOf(myAttribName); + Object value; + List myProperties; + if (index == -1) { + myProperties = getMatchingProperties(properties, myAttribName); + if (myProperties == null) { + return false; + } + } else { + myProperties = properties; + } + for (BaseProperty myProperty : myProperties) { + Iterator it = myProperty.getEntries().values().iterator(); + while (it.hasNext()) { + BaseEntry next = (BaseEntry) it.next(); + boolean skip = false; + switch (index) { + case 0: + // NGSI_LD_CREATED_AT + value = next.getCreatedAt(); + break; + case 1: + // NGSI_LD_OBSERVED_AT + value = next.getObservedAt(); + break; + case 2: + // NGSI_LD_MODIFIED_AT + value = next.getModifiedAt(); + break; + case 3: + // NGSI_LD_DATA_SET_ID + value = next.getCreatedAt(); + case 4: + // NGSI_LD_UNIT_CODE + if (next instanceof PropertyEntry) { + value = ((PropertyEntry) next).getUnitCode(); + } + default: + + value = getValue(next); + if (compound != null) { + value = getCompoundValue(value, compound); + } + break; + } + if (value == null) { + break; + } + operant = operant.replace("\"", ""); + if (TIME_PROPS.contains(myAttribName)) { + try { + operant = SerializationTools.date2Long(operant).toString(); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, e.getMessage()); + } + } + if (operant.matches(RANGE)) { + String[] range = operant.split("\\.\\."); + + switch (operator) { + case "==": + if (range[0].compareTo(value.toString()) <= 0 + && range[1].compareTo(value.toString()) >= 0) { + return true; + } + break; + case "!=": + if (range[0].compareTo(value.toString()) <= 0 + && range[1].compareTo(value.toString()) <= 0) { + return true; + } + break; + } + + return false; + + } else if (operant.matches(LIST)) { + List listOfOperants = Arrays.asList(operant.split(",")); + if (!(value instanceof List)) { + return false; + } + List myList = (List) value; + switch (operator) { + case "!=": + for (String listOperant : listOfOperants) { + if (myList.contains(listOperant)) { + return false; + } + } + return true; + case "==": + for (String listOperant : listOfOperants) { + if (myList.contains(listOperant)) { + return true; + } + } + return false; + default: + return false; + } + } else { + switch (operator) { + case "==": + if (value instanceof List) { + return listContains((List) value, operant); + } + if (operant.equals(value.toString())) { + return true; + } + break; + case "!=": + finalReturnValue = true; + if (value instanceof List) { + return !listContains((List) value, operant); + } 
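/*
 * The operant can take three shapes, matched by the RANGE and LIST patterns
 * declared above: a range ("20..25"), a comma-separated list ("red,green") or a
 * single scalar. Hedged examples of how the comparison behaves (the attribute
 * names are hypothetical):
 *
 *   temperature==20..25     value must fall inside the range
 *   color==red,green        value must equal one of the list members
 *   brandName==Mercedes     plain comparison of the single value
 *   name~=Merc.*            regular-expression match on the string value
 *
 * Date/time operants on createdAt/modifiedAt/observedAt are first converted to
 * a long via SerializationTools.date2Long before the comparison is made.
 */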
+ if (operant.equals(value.toString())) { + return false; + } + break; + case ">=": + if (value.toString().compareTo(operant) >= 0) { + return true; + } + break; + case "<=": + if (value.toString().compareTo(operant) <= 0) { + return true; + } + break; + case ">": + if (value.toString().compareTo(operant) > 0) { + return true; + } + break; + case "<": + if (value.toString().compareTo(operant) < 0) { + return true; + } + break; + case "~=": + if (value.toString().matches(operant)) { + return true; + } + break; + case "!~=": + finalReturnValue = true; + if (value.toString().matches(operant)) { + return false; + } + break; + } + + } + + } + + } + return finalReturnValue; + + } + + } + + private boolean listContains(List value, String operant) { + for (Object entry : value) { + if (entry.toString().equals(operant)) { + return true; + } + } + return false; + } + + private Collection getSubAttributes(BaseProperty potentialMatch) { + ArrayList result = new ArrayList(); + Iterator it = potentialMatch.getEntries().values().iterator(); + while (it.hasNext()) { + BaseEntry next = (BaseEntry) it.next(); + if (next.getRelationships() != null) { + result.addAll(next.getRelationships()); + } + if (next.getProperties() != null) { + result.addAll(next.getProperties()); + } + } + return result; + } + + /* + * private List getAllNGSIBaseProperties(BaseProperty prop) { + * ArrayList result = new ArrayList(); try { if + * (prop.getCreatedAt() != -1l) { Property createdAtProp = new Property(); + * createdAtProp.setId(new URI(NGSIConstants.NGSI_LD_CREATED_AT)); + * createdAtProp.setSingleEntry(new PropertyEntry("createdAt", + * prop.getCreatedAt())); result.add(createdAtProp); } if (prop.getObservedAt() + * != -1l) { Property observedAtProp = new Property(); observedAtProp.setId(new + * URI(NGSIConstants.NGSI_LD_OBSERVED_AT)); observedAtProp.setSingleEntry(new + * PropertyEntry("observerAt", prop.getObservedAt())); + * result.add(observedAtProp); } + * + * if (prop.getModifiedAt() != -1l) { Property modifiedAtProp = new Property(); + * modifiedAtProp.setId(new URI(NGSIConstants.NGSI_LD_MODIFIED_AT)); + * modifiedAtProp.setSingleEntry(new PropertyEntry("modifiedAt", + * prop.getModifiedAt())); result.add(modifiedAtProp); } if (prop instanceof + * Property) { Property realProp = (Property) prop; if (realProp.getUnitCode() + * != null && realProp.getUnitCode().equals("")) { Property unitCodeProp = new + * Property(); unitCodeProp.setId(new URI(NGSIConstants.NGSI_LD_UNIT_CODE)); + * unitCodeProp.setSingleEntry(new PropertyEntry("unitCode", + * realProp.getUnitCode())); result.add(unitCodeProp); } } } catch + * (URISyntaxException e) { // Left Empty intentionally. 
Should never happen + * since the URI constants are // controlled } return result; } + */ + + private Object getCompoundValue(Object value, String[] compound) throws ResponseException { + if (!(value instanceof Map)) { + return null; + } + Map complexValue = (Map) value; + String firstElement = expandAttributeName(compound[0].replaceAll("\\]", "").replaceAll("\\[", "")); + Object potentialResult = complexValue.get(firstElement); + if (potentialResult == null) { + return null; + } + if (potentialResult instanceof List) { + potentialResult = ((List) potentialResult).get(0); + } + if (compound.length == 1) { + return potentialResult; + } + + return getCompoundValue(potentialResult, Arrays.copyOfRange(compound, 1, compound.length)); + } + + private Object getValue(BaseEntry myEntry) { + Object value = null; + if (myEntry instanceof PropertyEntry) { + value = ((PropertyEntry) myEntry).getValue(); + if (value instanceof List) { + value = ((List) value).get(0); + } + } else if (myEntry instanceof RelationshipEntry) { + value = ((RelationshipEntry) myEntry).getObject().toString(); + } + return value; + } + + private List getMatchingProperties(List properties, String myAttribName) { + ArrayList result = new ArrayList(); + if (properties == null || properties.isEmpty()) { + return null; + } + + for (BaseProperty property : properties) { + if (property.getId().toString().equals(myAttribName)) { + result.add(property); + } + } + if (result.isEmpty()) { + return null; + } + return result; + } + + private String expandAttributeName(String attribute) throws ResponseException { + + return paramsResolver.expandAttribute(attribute, linkHeaders); + + } + + public QueryTerm getNext() { + return next; + } + + public void setNext(QueryTerm next) { + this.next = next; + this.next.setParent(this.getParent()); + } + + public boolean isNextAnd() { + return nextAnd; + } + + public void setNextAnd(boolean nextAnd) { + this.nextAnd = nextAnd; + } + + public QueryTerm getFirstChild() { + return firstChild; + } + + public void setFirstChild(QueryTerm firstChild) { + this.firstChild = firstChild; + this.firstChild.setParent(this); + } + + public String getAttribute() { + return attribute; + } + + public void setAttribute(String attribute) { + this.attribute = attribute; + } + + public String getOperator() { + return operator; + } + + public void setOperator(String operator) { + this.operator = operator; + } + + public String getOperant() { + return operant; + } + + public void setOperant(String operant) { + if (operant.matches(URI) && !operant.matches(TIME)) { // uri and time patterns are ambiguous in the abnf grammar + this.operant = "\"" + operant + "\""; + } else { + this.operant = operant; + } + + } + + public QueryTerm getParent() { + return parent; + } + + public void setParent(QueryTerm parent) { + this.parent = parent; + } + + @Override + public String toString() { + return "QueryTerm [next=" + next + ", nextAnd=" + nextAnd + ", firstChild=" + firstChild + ", attribute=" + + attribute + ", operator=" + operator + ", operant=" + operant + "]"; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + ((attribute == null) ? 0 : attribute.hashCode()); + result = prime * result + ((firstChild == null) ? 0 : firstChild.hashCode()); + result = prime * result + ((next == null) ? 0 : next.hashCode()); + result = prime * result + (nextAnd ? 1231 : 1237); + result = prime * result + ((operant == null) ? 
0 : operant.hashCode()); + result = prime * result + ((operator == null) ? 0 : operator.hashCode()); + result = prime * result + ((parent == null) ? 0 : parent.hashCode()); + return result; + } + + public boolean equals(Object obj, boolean ignoreKids) { + if (this == obj) + return true; + if (obj == null) + return false; + if (getClass() != obj.getClass()) + return false; + QueryTerm other = (QueryTerm) obj; + if (attribute == null) { + if (other.attribute != null) + return false; + } else if (!attribute.equals(other.attribute)) + return false; + if (!ignoreKids) { + if (firstChild == null) { + if (other.firstChild != null) + return false; + } else if (!firstChild.equals(other.firstChild)) + return false; + } + if (next == null) { + if (other.next != null) + return false; + } else if (!next.equals(other.next)) + return false; + if (nextAnd != other.nextAnd) + return false; + if (operant == null) { + if (other.operant != null) + return false; + } else if (!operant.equals(other.operant)) + return false; + if (operator == null) { + if (other.operator != null) + return false; + } else if (!operator.equals(other.operator)) + return false; + if (parent == null) { + if (other.parent != null) + return false; + } else if (!parent.equals(other.parent, true)) + return false; + return true; + } + + @Override + public boolean equals(Object obj) { + return equals(obj, false); + } + + public String toSql() throws ResponseException { + StringBuilder builder = new StringBuilder(); + toSql(builder, false); + // builder.append(";"); + return builder.toString(); + } + + public String toSql(boolean temporalEntityFormat) throws ResponseException { + StringBuilder builder = new StringBuilder(); + toSql(builder, temporalEntityFormat); + // builder.append(";"); + return builder.toString(); + } + + private void toSql(StringBuilder result, boolean temporalEntityMode) throws ResponseException { + if (firstChild != null) { + result.append("("); + firstChild.toSql(result, temporalEntityMode); + result.append(")"); + } else { + if (temporalEntityMode) { + getAttribQueryForTemporalEntity(result); + } else { + getAttribQueryV2(result); + } + } + if (hasNext()) { + if (nextAnd) { + result.append(" and "); + } else { + result.append(" or "); + } + next.toSql(result, temporalEntityMode); + } + } + + private void getAttribQuery(StringBuilder result) throws ResponseException { + ArrayList attribPath = getAttribPathArray(); + + StringBuilder testAttributeExistsProperty = new StringBuilder(""); + StringBuilder testAttributeExistsRelationship = new StringBuilder(""); + StringBuilder attributeFilterProperty = new StringBuilder(""); + StringBuilder attributeFilterRelationship = new StringBuilder(""); + StringBuilder testValueTypeForPatternOp = new StringBuilder(""); + StringBuilder testValueTypeForDateTime = new StringBuilder(""); + + String reservedDbColumn = null; + if (attribPath.size() == 1) { + // if not mapped, returns null + reservedDbColumn = DBConstants.NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING.get(attribPath.get(0)); + } + + // do not use createdAt/modifiedAt db columns if value (operant) is not a + // date/time value + if (reservedDbColumn != null + && (reservedDbColumn.equals(DBConstants.DBCOLUMN_CREATED_AT) + || reservedDbColumn.equals(DBConstants.DBCOLUMN_MODIFIED_AT)) + && !(operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME))) { + reservedDbColumn = null; + } + + if (reservedDbColumn == null) { + testAttributeExistsProperty.append("data@>'{\""); + 
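/*
 * toSql() renders the term tree into a WHERE fragment over the jsonb "data"
 * column: an existence test using the @> containment operator combined with a
 * value filter built from a #> / #>> path expression. A hedged sketch of the
 * rough shape produced for a simple equality on an expanded property name
 * (URIs abbreviated with "...", placeholders in angle brackets; the real output
 * additionally carries the relationship variant joined with "or"):
 *
 *   (data@>'{"<expanded-attr>":[{"@type":["...Property"]}]}'
 *     and (data#>'{<expanded-attr>,0,...hasValue,0,@value}') = '"Mercedes"'::jsonb)
 */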
testAttributeExistsRelationship.append("data@>'{\""); + attributeFilterProperty.append("(data#"); + attributeFilterRelationship.append("data#"); + testValueTypeForPatternOp.append("jsonb_typeof(data#>'{"); + testValueTypeForDateTime.append("data#>>'{"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + attributeFilterProperty.append(">>"); + attributeFilterRelationship.append(">>"); + } else { + attributeFilterProperty.append(">"); + attributeFilterRelationship.append(">"); + } + attributeFilterProperty.append("'{"); + attributeFilterRelationship.append("'{"); + + int iElem = 0; + String lastAttribute = ""; + for (String subPath : attribPath) { + // in compoundAttrName filter, Property test only applies to the top level + // element + if (!attribute.contains("[") || attribute.contains("[") && iElem == 0) { + testAttributeExistsProperty.append(subPath); + testAttributeExistsProperty.append("\":[{\""); + testAttributeExistsRelationship.append(subPath); + testAttributeExistsRelationship.append("\":[{\""); + } + attributeFilterProperty.append(subPath); + attributeFilterProperty.append(",0,"); + attributeFilterRelationship.append(subPath); + attributeFilterRelationship.append(",0,"); + testValueTypeForPatternOp.append(subPath); + testValueTypeForPatternOp.append(",0,"); + testValueTypeForDateTime.append(subPath); + testValueTypeForDateTime.append(",0,"); + // in compoundAttrName filter, hasValue/hasObject is in the top level element + if (attribute.contains("[") && iElem == 0) { + attributeFilterProperty.append(NGSIConstants.NGSI_LD_HAS_VALUE + ",0,"); + attributeFilterRelationship.append(NGSIConstants.NGSI_LD_HAS_OBJECT + ",0,"); + testValueTypeForPatternOp.append(NGSIConstants.NGSI_LD_HAS_VALUE + ",0,"); + testValueTypeForDateTime.append(NGSIConstants.NGSI_LD_HAS_VALUE + ",0,"); + } + iElem++; + lastAttribute = subPath; + } + + // createdAt/modifiedAt/observedAt type is DateTime (without array brackets) + if (lastAttribute.equals(NGSIConstants.NGSI_LD_CREATED_AT) + || lastAttribute.equals(NGSIConstants.NGSI_LD_MODIFIED_AT) + || lastAttribute.equals(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + testAttributeExistsProperty.append("@type\":\"" + NGSIConstants.NGSI_LD_DATE_TIME + "\""); + } else { + testAttributeExistsProperty.append("@type\":[\"" + NGSIConstants.NGSI_LD_PROPERTY + "\"]"); + } + testAttributeExistsRelationship.append("@type\":[\"" + NGSIConstants.NGSI_LD_RELATIONSHIP + "\"]"); + for (int i = 0; i < attribPath.size(); i++) { + if (!attribute.contains("[") || attribute.contains("[") && i == 0) { + testAttributeExistsProperty.append("}]"); + testAttributeExistsRelationship.append("}]"); + } + } + testAttributeExistsProperty.append("}'"); + testAttributeExistsRelationship.append("}'"); + // in compoundAttrName, hasValue is at the top level element. 
+ // createdAt/modifiedAt/observedAt properties do not have a hasValue element + if (!attribute.contains("[") && !lastAttribute.equals(NGSIConstants.NGSI_LD_CREATED_AT) + && !lastAttribute.equals(NGSIConstants.NGSI_LD_MODIFIED_AT) + && !lastAttribute.equals(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + attributeFilterProperty.append(NGSIConstants.NGSI_LD_HAS_VALUE + ",0,"); + attributeFilterRelationship.append(NGSIConstants.NGSI_LD_HAS_OBJECT + ",0,"); + testValueTypeForPatternOp.append(NGSIConstants.NGSI_LD_HAS_VALUE + ",0,"); + testValueTypeForDateTime.append(NGSIConstants.NGSI_LD_HAS_VALUE + ",0,"); + } + attributeFilterProperty.append("@value}')"); + attributeFilterRelationship.append("@id}'"); + testValueTypeForPatternOp.append("@value}') = 'string'"); + testValueTypeForDateTime.append("@type}' = "); + if (operant.matches(DATETIME)) { + attributeFilterProperty.append("::timestamp "); + testValueTypeForDateTime.append("'" + NGSIConstants.NGSI_LD_DATE_TIME + "'"); + } else if (operant.matches(DATE)) { + attributeFilterProperty.append("::date "); + testValueTypeForDateTime.append("'" + NGSIConstants.NGSI_LD_DATE + "'"); + } else if (operant.matches(TIME)) { + attributeFilterProperty.append("::time "); + testValueTypeForDateTime.append("'" + NGSIConstants.NGSI_LD_TIME + "'"); + } + + } else { + attributeFilterProperty.append(reservedDbColumn); + } + + boolean useRelClause = applyOperator(attributeFilterProperty, attributeFilterRelationship); + + if (reservedDbColumn == null) { + if (useRelClause) { + result.append("((" + testAttributeExistsProperty.toString() + " and " + + attributeFilterProperty.toString() + ") or (" + testAttributeExistsRelationship.toString() + + " and " + attributeFilterRelationship.toString() + "))"); + } else { + result.append( + "(" + testAttributeExistsProperty.toString() + " and " + attributeFilterProperty.toString()); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) + || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP)) { + result.append(" and " + testValueTypeForPatternOp.toString()); + } + if (operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + result.append(" and " + testValueTypeForDateTime.toString()); + } + result.append(")"); + } + } else { + result.append("(" + attributeFilterProperty.toString() + ")"); + } + + } + + private void getAttribQueryV2(StringBuilder result) throws ResponseException { + ArrayList attribPath = getAttribPathArray(); + + StringBuilder attributeFilterProperty = new StringBuilder(""); + + String reservedDbColumn = null; + if (attribPath.size() == 1) { + // if not mapped, returns null + reservedDbColumn = DBConstants.NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING.get(attribPath.get(0)); + } + + // do not use createdAt/modifiedAt db columns if value (operant) is not a + // date/time value + if (reservedDbColumn != null + && (reservedDbColumn.equals(DBConstants.DBCOLUMN_CREATED_AT) + || reservedDbColumn.equals(DBConstants.DBCOLUMN_MODIFIED_AT)) + && !(operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME))) { + reservedDbColumn = null; + } + if (reservedDbColumn != null) { + attributeFilterProperty.append(reservedDbColumn); + applyOperator(attributeFilterProperty); + } else { + /* + * EXISTS (SELECT FROM + * jsonb_array_elements(data#>'{https://uri.etsi.org/ngsi-ld/default-context/ + * friend}') as x WHERE x#> '{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' = + * '"urn:person:Victoria"' OR x#> + * '{https://uri.etsi.org/ngsi-ld/hasValue,0,@id}' = '"urn:person:Victoria"') + */ + + int iElem = 
0; + String currentSet = "data"; + char charcount = 'a'; + String lastAttrib = null; + for (String subPath : attribPath) { + attributeFilterProperty.append("EXISTS (SELECT FROM jsonb_array_elements(" + currentSet + "#>'{"); + attributeFilterProperty.append(subPath); + if (attribute.contains("[")&&attribute.contains(".") && iElem == 1) { + attributeFilterProperty.append(",0," + NGSIConstants.NGSI_LD_HAS_VALUE); + } + else if (attribute.contains("[") &&!attribute.contains(".") && iElem == 0) { + attributeFilterProperty.append(",0," + NGSIConstants.NGSI_LD_HAS_VALUE); + } + attributeFilterProperty.append("}') as "); + attributeFilterProperty.append(charcount); + currentSet = "" + charcount; + attributeFilterProperty.append(" WHERE "); + charcount++; + iElem++; + lastAttrib = subPath; + } + + // x#> '{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' + charcount--; + if (operator.equals(NGSIConstants.QUERY_EQUAL) || operator.equals(NGSIConstants.QUERY_UNEQUAL) + || operator.equals(NGSIConstants.QUERY_PATTERNOP) + || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP)) { + attributeFilterProperty.append(charcount); + attributeFilterProperty.append("#> '{"); + attributeFilterProperty.append("https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'"); + applyOperator(attributeFilterProperty); + attributeFilterProperty.append(" OR "); + } + attributeFilterProperty.append('('); + attributeFilterProperty.append(charcount); + attributeFilterProperty.append("#>"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + attributeFilterProperty.append(">"); + } + attributeFilterProperty.append(" '{"); + attributeFilterProperty.append("https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')"); + if (operant.matches(DATETIME)) { + attributeFilterProperty.append("::timestamp "); + } else if (operant.matches(DATE)) { + attributeFilterProperty.append("::date "); + } else if (operant.matches(TIME)) { + attributeFilterProperty.append("::time "); + } + applyOperator(attributeFilterProperty); + attributeFilterProperty.append(" OR "); + if (TIME_PROPS.contains(lastAttrib)) { + attributeFilterProperty.append('('); + attributeFilterProperty.append((char) (charcount - 1)); + attributeFilterProperty.append("#>>"); + attributeFilterProperty.append(" '{"); + attributeFilterProperty.append(lastAttrib); + attributeFilterProperty.append(",0,@value}')"); + + } else if (lastAttrib.equals(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + attributeFilterProperty.append('('); + attributeFilterProperty.append(charcount); + attributeFilterProperty.append("#>"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + attributeFilterProperty.append(">"); + } + attributeFilterProperty.append(" '{"); + attributeFilterProperty.append("@id}')"); + } else { + attributeFilterProperty.append('('); + attributeFilterProperty.append(charcount); + attributeFilterProperty.append("#>"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + attributeFilterProperty.append(">"); + } + attributeFilterProperty.append(" '{"); + attributeFilterProperty.append("@value}')"); + + } + + if (operant.matches(DATETIME)) { + attributeFilterProperty.append("::timestamp "); + } else if 
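/*
 * Each element of the attribute path opens its own
 * EXISTS (SELECT FROM jsonb_array_elements(...) as <alias> WHERE ...) block,
 * with aliases a, b, c, ... each reading from the previous alias, so a
 * two-level path roughly expands to (URIs abbreviated, values hypothetical):
 *
 *   EXISTS (SELECT FROM jsonb_array_elements(data#>'{<attr1>}') as a WHERE
 *     EXISTS (SELECT FROM jsonb_array_elements(a#>'{<attr2>}') as b WHERE
 *       b#> '{...hasObject,0,@id}' = '"urn:person:Victoria"'::jsonb
 *       OR (b#> '{...hasValue,0,@value}') = '"urn:person:Victoria"'::jsonb OR ...))
 */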
(operant.matches(DATE)) { + attributeFilterProperty.append("::date "); + } else if (operant.matches(TIME)) { + attributeFilterProperty.append("::time "); + } + applyOperator(attributeFilterProperty); + for (int i = 0; i < attribPath.size(); i++) { + attributeFilterProperty.append(')'); + } + } + + result.append("(" + attributeFilterProperty.toString() + ")"); + + } + + private ArrayList getAttribPathArray() throws ResponseException { + ArrayList attribPath = new ArrayList(); + if (attribute.contains("[") && attribute.contains(".")) { + if (attribute.contains(".")) { + for (String subPart : attribute.split("\\.")) { + if (subPart.contains("[")) { + for (String subParts : subPart.split("\\[")) { + subParts = subParts.replaceAll("\\]", ""); + attribPath.add(expandAttributeName(subParts)); + } + } else { + attribPath.add(expandAttributeName(subPart)); + } + } + } + } + else if (attribute.contains("[")) { + for (String subPart : attribute.split("\\[")) { + subPart = subPart.replaceAll("\\]", ""); + attribPath.add(expandAttributeName(subPart)); + } + } else if (attribute.matches(URI)) { + attribPath.add(expandAttributeName(attribute)); + } else if (attribute.contains(".")) { + for (String subPart : attribute.split("\\.")) { + attribPath.add(expandAttributeName(subPart)); + } + } else { + attribPath.add(expandAttributeName(attribute)); + } + return attribPath; + } + + private boolean applyOperator(StringBuilder attributeFilterProperty) throws BadRequestException { + boolean useRelClause = false; + + String typecast = "jsonb"; + if (operant.matches(DATETIME)) { + typecast = "timestamp"; + } else if (operant.matches(DATE)) { + typecast = "date"; + } else if (operant.matches(TIME)) { + typecast = "time"; + } + + switch (operator) { + case NGSIConstants.QUERY_EQUAL: + if (operant.matches(LIST)) { + attributeFilterProperty.append(" in ("); + for (String listItem : operant.split(",")) { + attributeFilterProperty.append("'" + listItem + "'::" + typecast + ","); + } + attributeFilterProperty.setCharAt(attributeFilterProperty.length() - 1, ')'); + } else if (operant.matches(RANGE)) { + String[] myRange = operant.split("\\.\\."); + attributeFilterProperty.append( + " between '" + myRange[0] + "'::" + typecast + " and '" + myRange[1] + "'::" + typecast); + } else { + attributeFilterProperty.append(" = '" + operant + "'::" + typecast); + + } + useRelClause = !(operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)); + break; + case NGSIConstants.QUERY_UNEQUAL: + if (operant.matches(LIST)) { + attributeFilterProperty.append(" not in ("); + for (String listItem : operant.split(",")) { + attributeFilterProperty.append("'" + listItem + "'::" + typecast + ","); + } + attributeFilterProperty.setCharAt(attributeFilterProperty.length() - 1, ')'); + } else if (operant.matches(RANGE)) { + String[] myRange = operant.split("\\.\\."); + attributeFilterProperty.append( + " not between '" + myRange[0] + "'::" + typecast + " and '" + myRange[1] + "'::" + typecast); + } else { + attributeFilterProperty.append(" <> '" + operant + "'::" + typecast); + } + useRelClause = !(operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)); + break; + case NGSIConstants.QUERY_GREATEREQ: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" >= '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_LESSEQ: + if (operant.matches(LIST)) { + throw new 
BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" <= '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_GREATER: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" > '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_LESS: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" < '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_PATTERNOP: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" ~ '" + operant + "'"); + break; + case NGSIConstants.QUERY_NOTPATTERNOP: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" !~ '" + operant + "'"); + break; + default: + throw new BadRequestException(); + } + return useRelClause; + } + + private boolean applyOperator(StringBuilder attributeFilterProperty, StringBuilder attributeFilterRelationship) + throws BadRequestException { + boolean useRelClause = false; + + String typecast = "jsonb"; + if (operant.matches(DATETIME)) { + typecast = "timestamp"; + } else if (operant.matches(DATE)) { + typecast = "date"; + } else if (operant.matches(TIME)) { + typecast = "time"; + } + + switch (operator) { + case NGSIConstants.QUERY_EQUAL: + if (operant.matches(LIST)) { + attributeFilterProperty.append(" in ("); + attributeFilterRelationship.append(" in ("); + for (String listItem : operant.split(",")) { + attributeFilterProperty.append("'" + listItem + "'::" + typecast + ","); + attributeFilterRelationship.append("'" + listItem + "'::" + typecast + ","); + } + attributeFilterProperty.setCharAt(attributeFilterProperty.length() - 1, ')'); + attributeFilterRelationship.setCharAt(attributeFilterRelationship.length() - 1, ')'); + } else if (operant.matches(RANGE)) { + String[] myRange = operant.split("\\.\\."); + attributeFilterProperty.append( + " between '" + myRange[0] + "'::" + typecast + " and '" + myRange[1] + "'::" + typecast); + attributeFilterRelationship.append( + " between '" + myRange[0] + "'::" + typecast + " and '" + myRange[1] + "'::" + typecast); + } else { + attributeFilterProperty.append(" = '" + operant + "'::" + typecast); + attributeFilterRelationship.append(" = '" + operant + "'::" + typecast); + + } + useRelClause = !(operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)); + break; + case NGSIConstants.QUERY_UNEQUAL: + if (operant.matches(LIST)) { + attributeFilterProperty.append(" not in ("); + attributeFilterRelationship.append(" not in ("); + for (String listItem : operant.split(",")) { + attributeFilterProperty.append("'" + listItem + "'::" + typecast + ","); + attributeFilterRelationship.append("'" + listItem + "'::" + typecast + ","); + } + attributeFilterProperty.setCharAt(attributeFilterProperty.length() - 1, ')'); + attributeFilterRelationship.setCharAt(attributeFilterRelationship.length() - 1, ')'); + } else if (operant.matches(RANGE)) { + String[] myRange = operant.split("\\.\\."); + attributeFilterProperty.append( + " not between '" + myRange[0] + "'::" + typecast + " and '" + myRange[1] + 
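/*
 * applyOperator maps the NGSI-LD query operators onto SQL, casting the operant
 * to ::timestamp, ::date, ::time or ::jsonb depending on its pattern. Roughly:
 *
 *   ==              ->  = , or "in (...)" for lists, "between ... and ..." for ranges
 *   !=              ->  <> , or "not in (...)" / "not between ... and ..."
 *   >=, <=, >, <    ->  the same SQL comparison; lists and ranges are rejected
 *                       with a BadRequestException
 *   ~=  ->  ~       !~=  ->  !~
 *
 * For example, a hypothetical operant "2020-08-01T12:00:00Z" matches DATETIME
 * and is compared as '2020-08-01T12:00:00Z'::timestamp.
 */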
"'::" + typecast); + attributeFilterRelationship.append( + " not between '" + myRange[0] + "'::" + typecast + " and '" + myRange[1] + "'::" + typecast); + } else { + attributeFilterProperty.append(" <> '" + operant + "'::" + typecast); + attributeFilterRelationship.append(" <> '" + operant + "'::" + typecast); + + } + useRelClause = !(operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)); + break; + case NGSIConstants.QUERY_GREATEREQ: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" >= '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_LESSEQ: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" <= '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_GREATER: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" > '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_LESS: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" < '" + operant + "'::" + typecast); + break; + case NGSIConstants.QUERY_PATTERNOP: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" ~ '" + operant + "'"); + break; + case NGSIConstants.QUERY_NOTPATTERNOP: + if (operant.matches(LIST)) { + throw new BadRequestException(); + } + if (operant.matches(RANGE)) { + throw new BadRequestException(); + } + attributeFilterProperty.append(" !~ '" + operant + "'"); + break; + default: + throw new BadRequestException(); + } + return useRelClause; + } + + private void getAttribQueryForTemporalEntity(StringBuilder result) throws ResponseException { + ArrayList attribPath = getAttribPathArray(); + //https://uri.etsi.org/ngsi-ld/default-context/abstractionLevel,0 + /* + * String attribId = null; for (String subPath : attribPath) { attribId = + * subPath; break; // sub-properties are not supported yet in HistoryManager } + */ + + int iElem = 0; + String currentSet = "m.attrdata"; + char charcount = 'a'; + String lastAttrib = null; + for (String subPath : attribPath) { + result.append("EXISTS (SELECT FROM jsonb_array_elements(" + currentSet + "#>'{"); + result.append(subPath); + if (attribute.contains("[") && iElem == 0) { + result.append(",0," + NGSIConstants.NGSI_LD_HAS_VALUE); + } + result.append("}') as "); + result.append(charcount); + currentSet = "" + charcount; + result.append(" WHERE "); + charcount++; + iElem++; + lastAttrib = subPath; + } + + // x#> '{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' + charcount--; + if (operator.equals(NGSIConstants.QUERY_EQUAL) || operator.equals(NGSIConstants.QUERY_UNEQUAL) + || operator.equals(NGSIConstants.QUERY_PATTERNOP) + || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP)) { + result.append(charcount); + result.append("#> '{"); + result.append("https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'"); + applyOperator(result); + result.append(" OR "); + } + result.append('('); + result.append(charcount); + result.append("#>"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || 
operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + result.append(">"); + } + result.append(" '{"); + result.append("https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')"); + if (operant.matches(DATETIME)) { + result.append("::timestamp "); + } else if (operant.matches(DATE)) { + result.append("::date "); + } else if (operant.matches(TIME)) { + result.append("::time "); + } + applyOperator(result); + result.append(" OR "); + if (TIME_PROPS.contains(lastAttrib)) { + result.append('('); + result.append((char) (charcount - 1)); + result.append("#>>"); + result.append(" '{"); + result.append(lastAttrib); + result.append(",0,@value}')"); + + } else if (lastAttrib.equals(NGSIConstants.NGSI_LD_DATA_SET_ID) || lastAttrib.equals(NGSIConstants.NGSI_LD_INSTANCE_ID)) { + result.append('('); + result.append(charcount); + result.append("#>"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + result.append(">"); + } + result.append(" '{"); + result.append("@id}')"); + } else { + result.append('('); + result.append(charcount); + result.append("#>"); + if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) + || operant.matches(DATE) || operant.matches(TIME) || operant.matches(DATETIME)) { + result.append(">"); + } + result.append(" '{"); + result.append("@value}')"); + + } + + if (operant.matches(DATETIME)) { + result.append("::timestamp "); + } else if (operant.matches(DATE)) { + result.append("::date "); + } else if (operant.matches(TIME)) { + result.append("::time "); + } + applyOperator(result); + for (int i = 0; i < attribPath.size(); i++) { + result.append(')'); + } + + + /* + * StringBuilder attributeFilterProperty = new StringBuilder("(m.attrdata#"); + * StringBuilder attributeFilterRelationship = new StringBuilder("m.attrdata#"); + * String testValueTypeForPatternOp = new String( "jsonb_typeof(m.attrdata#>'{" + * + attribId + ",0," + NGSIConstants.NGSI_LD_HAS_VALUE + + * ",0,@value}') = 'string'"); StringBuilder testValueTypeForDateTime = new + * StringBuilder( "m.attrdata#>>'{" + attribId + ",0," + + * NGSIConstants.NGSI_LD_HAS_VALUE + ",0,@type}' = "); + * + * if (operator.equals(NGSIConstants.QUERY_PATTERNOP) || + * operator.equals(NGSIConstants.QUERY_NOTPATTERNOP) || operant.matches(DATE) || + * operant.matches(TIME) || operant.matches(DATETIME)) { + * attributeFilterProperty.append(">>"); + * attributeFilterRelationship.append(">>"); } else { + * attributeFilterProperty.append(">"); attributeFilterRelationship.append(">"); + * } + * + * attributeFilterProperty.append("'{" + attribId + ",0," + + * NGSIConstants.NGSI_LD_HAS_VALUE + ",0,@value}')"); + * attributeFilterRelationship.append("'{" + attribId + ",0," + + * NGSIConstants.NGSI_LD_HAS_OBJECT + ",0,@id}'"); if + * (operant.matches(DATETIME)) { attributeFilterProperty.append("::timestamp "); + * testValueTypeForDateTime.append("'" + NGSIConstants.NGSI_LD_DATE_TIME + "'"); + * } else if (operant.matches(DATE)) { + * attributeFilterProperty.append("::date "); + * testValueTypeForDateTime.append("'" + NGSIConstants.NGSI_LD_DATE + "'"); } + * else if (operant.matches(TIME)) { attributeFilterProperty.append("::time "); + * testValueTypeForDateTime.append("'" + NGSIConstants.NGSI_LD_TIME + "'"); } + * + * + * boolean useRelClause = applyOperator(attributeFilterProperty, + * 
attributeFilterRelationship); if (useRelClause) { result.append("((" + + * attributeFilterProperty.toString() + ") or (" + + * attributeFilterRelationship.toString() + "))"); } else { result.append("(" + + * attributeFilterProperty.toString()); if + * (operator.equals(NGSIConstants.QUERY_PATTERNOP) || + * operator.equals(NGSIConstants.QUERY_NOTPATTERNOP)) { result.append(" and " + + * testValueTypeForPatternOp); } if (operant.matches(DATE) || + * operant.matches(TIME) || operant.matches(DATETIME)) { result.append(" and " + + * testValueTypeForDateTime.toString()); } result.append(")"); } + */ + } + + // Only for testing; + public void setParamsResolver(ParamsResolver paramsResolver) { + this.paramsResolver = paramsResolver; + if (this.hasNext()) { + next.setParamsResolver(paramsResolver); + } + if (this.getFirstChild() != null) { + this.getFirstChild().setParamsResolver(paramsResolver); + } + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Relationship.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Relationship.java new file mode 100644 index 0000000000000000000000000000000000000000..da928073a73df6eefdbd8c433365178a50b23792 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Relationship.java @@ -0,0 +1,62 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.HashMap; +import java.util.List; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public class Relationship extends BaseProperty { + + private HashMap dataSetId2object; + + public Relationship() { + type = "Relationship"; + } + + public void finalize() throws Throwable { + + } + + public HashMap getEntries() { + return dataSetId2object; + } + + public void setObjects(HashMap objects) { + this.dataSetId2object = objects; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = super.hashCode(); + result = prime * result + ((dataSetId2object == null) ? 
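/*
 * A Relationship mirrors Property but stores target URIs per datasetId. A
 * minimal sketch, assuming setId is inherited from BaseProperty (the attribute
 * URI, datasetId and target are hypothetical):
 *
 *   Relationship owns = new Relationship();
 *   owns.setId(new URI("https://uri.etsi.org/ngsi-ld/default-context/owns"));
 *   HashMap entries = new HashMap();
 *   entries.put("urn:ngsi-ld:dataset:1",
 *       new RelationshipEntry("urn:ngsi-ld:dataset:1", new URI("urn:ngsi-ld:Person:1")));
 *   owns.setObjects(entries);
 */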
0 : dataSetId2object.hashCode()); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) + return true; + if (!super.equals(obj)) + return false; + if (getClass() != obj.getClass()) + return false; + Relationship other = (Relationship) obj; + if (dataSetId2object == null) { + if (other.dataSetId2object != null) + return false; + } else if (!dataSetId2object.equals(other.dataSetId2object)) + return false; + return true; + } + + @Override + public boolean isMultiValue() { + return dataSetId2object.size() != 1; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/RelationshipEntry.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/RelationshipEntry.java new file mode 100644 index 0000000000000000000000000000000000000000..d6d9ab792d07fe1658e9805fdbf8a46643ed5ac2 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/RelationshipEntry.java @@ -0,0 +1,29 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.UUID; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; + +public class RelationshipEntry extends BaseEntry{ + private URI object; + + + + public RelationshipEntry(String dataSetId, URI object) { + super(dataSetId); + this.type = NGSIConstants.NGSI_LD_RELATIONSHIP; + this.object = object; + } + + public URI getObject() { + return object; + } + public void setObject(URI object) { + this.object = object; + } + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/RestResponse.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/RestResponse.java new file mode 100644 index 0000000000000000000000000000000000000000..10a58d47edeed9805118869eeac10f10b53bf40d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/RestResponse.java @@ -0,0 +1,68 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.Arrays; +import java.util.List; + +import org.springframework.http.HttpStatus; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +/** + * @version 1.0 + * @created 09-Jul-2018 + */ + +public class RestResponse { + private String type; + @JsonIgnore + private HttpStatus status; + private String title; + private String details; + + /* + * public RestResponse(HttpStatus status, String title, String detail) { + * super(); this.status = status; this.title = title; this.details = detail; + * this.type = status.getReasonPhrase(); } + */ + + public RestResponse(ErrorType errorType, String details) { + this.status = HttpStatus.valueOf(errorType.getCode()); + this.title = errorType.getMessage(); + this.details = details; + this.type = errorType.getErrorType(); + } + + public RestResponse(ResponseException exception) { + super(); + this.status = exception.getHttpStatus(); + this.title = exception.getError().getMessage(); + this.details = exception.getMessage(); + this.type = exception.getError().getErrorType(); + } + + public String getType() { + return type; + } + + public HttpStatus getStatus() { + return status; + } + + public String getTitle() { + return title; + } + + public String getDetail() { + return details; + } + + public byte[] toJsonBytes() { + String result = "{\n\t\"type\":\"" + type + 
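/*
 * RestResponse turns an ErrorType or a ResponseException into the NGSI-LD error
 * payload; toJsonBytes() emits the type/title/details JSON assembled just
 * below. A hedged usage sketch (the detail text is hypothetical, placeholders
 * in angle brackets stand for whatever the ErrorType carries):
 *
 *   RestResponse body = new RestResponse(ErrorType.BadRequestData,
 *       "Could not parse the q parameter");
 *   byte[] payload = body.toJsonBytes();
 *   // {
 *   //   "type":"<errorType>",
 *   //   "title":"<errorType message>",
 *   //   "details":"Could not parse the q parameter"
 *   // }
 */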
"\",\n\t\"title\":\"" + title + "\",\n\t\"details\":\"" + details + + "\"\n}"; + return result.getBytes(); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Subscription.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Subscription.java new file mode 100644 index 0000000000000000000000000000000000000000..6b5fc7f8431f2fdc3c11545180e8d51fedbbcbfc --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Subscription.java @@ -0,0 +1,157 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:23 + */ +public class Subscription extends Query { + + private String description; + private Long expires; + private URI id; + private String name; + private NotificationParam notification; + private String status; + private Integer throttling = 0; + private Integer timeInterval = 0; + private String type; + private Boolean internal = false; + private QueryTerm queryTerm; + private boolean isActive = true; + + public Subscription() { + super(); + } + + public Subscription(Map customFlags, List attributeNames, List entities, + String ldContext, LDGeoQuery ldGeoQuery, String ldQuery, LDTemporalQuery ldTempQuery, + List requestorList, String description, Long expires, URI id, String name, + NotificationParam notification, String status, Integer throttling, Integer timeInterval, String type) { + super(customFlags, attributeNames, entities, ldContext, ldGeoQuery, ldQuery, ldTempQuery, requestorList); + this.description = description; + this.expires = expires; + this.id = id; + this.name = name; + this.notification = notification; + this.status = status; + this.throttling = throttling; + this.timeInterval = timeInterval; + this.type = type; + } + + public Boolean isInternal() { + return internal; + } + + public void setInternal(Boolean internal) { + this.internal = internal; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Long getExpires() { + return expires; + } + + public void setExpires(Long expires) { + this.expires = expires; + } + + public URI getId() { + return id; + } + + public void setId(URI id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public NotificationParam getNotification() { + return notification; + } + + public void setNotification(NotificationParam notification) { + this.notification = notification; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public Integer getThrottling() { + return throttling; + } + + public void setThrottling(Integer throttling) { + this.throttling = throttling; + } + + public Integer getTimeInterval() { + return timeInterval; + } + + public void setTimeInterval(Integer timeInterval) { + this.timeInterval = timeInterval; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public QueryTerm getQueryTerm() { + return queryTerm; + } + + public void setQueryTerm(QueryTerm queryTerm) { + this.queryTerm = queryTerm; + } + + public void finalize() throws Throwable { + + } + + public boolean isActive() { + return isActive; + } + + public void 
setActive(boolean isActive) { + this.isActive = isActive; + } + + @Override + public String toString() { + return "Subscription [description=" + description + ", expires=" + expires + ", id=" + id + ", name=" + name + + ", notification=" + notification + ", status=" + status + ", throttling=" + throttling + + ", timeInterval=" + timeInterval + ", type=" + type + ", internal=" + internal + ", queryTerm=" + + queryTerm + ", attributeNames=" + attributeNames + ", entities=" + entities + ", ldContext=" + + ldContext + ", ldGeoQuery=" + ldGeoQuery + ", ldQuery=" + ldQuery + ", ldTempQuery=" + ldTempQuery + + ", requestorList=" + requestorList + ", customFlags=" + customFlags + "]"; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/SubscriptionRequest.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/SubscriptionRequest.java new file mode 100644 index 0000000000000000000000000000000000000000..5ff8369dc4f9a77cfeee2c0edadc5a257a3fa307 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/SubscriptionRequest.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.List; + +public class SubscriptionRequest { + Subscription subscription; + List context; + + public SubscriptionRequest(Subscription subscription, List context) { + this.subscription = subscription; + this.context = context; + } + public Subscription getSubscription() { + return subscription; + } + public void setSubscription(Subscription subscription) { + this.subscription = subscription; + } + public List getContext() { + return context; + } + public void setContext(List context) { + this.context = context; + } + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TemporalEntityStorageKey.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TemporalEntityStorageKey.java new file mode 100644 index 0000000000000000000000000000000000000000..4c37fe767515a53ac0f0219cfd7dc36b015d61d8 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TemporalEntityStorageKey.java @@ -0,0 +1,92 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import com.google.gson.annotations.Expose; +import com.google.gson.annotations.SerializedName; + +public class TemporalEntityStorageKey { + + + public TemporalEntityStorageKey(String entityId) { + super(); + this.entityId = entityId; + } + + @SerializedName("entityId") + @Expose + private String entityId; + @SerializedName("entityType") + @Expose + private String entityType; + @SerializedName("entityCreatedAt") + @Expose + private String entityCreatedAt; + @SerializedName("entityModifiedAt") + @Expose + private String entityModifiedAt; + @SerializedName("attributeId") + @Expose + private String attributeId; + @SerializedName("instanceId") + @Expose + private String instanceId; + @SerializedName("overwriteOp") + @Expose + private Boolean overwriteOp; + + public String getEntityId() { + return entityId; + } + + public void setEntityId(String entityId) { + this.entityId = entityId; + } + + public String getEntityType() { + return entityType; + } + + public void setEntityType(String entityType) { + this.entityType = entityType; + } + + public String getEntityCreatedAt() { + return entityCreatedAt; + } + + public void setEntityCreatedAt(String entityCreatedAt) { + this.entityCreatedAt = entityCreatedAt; + } + 
+ public String getEntityModifiedAt() { + return entityModifiedAt; + } + + public void setEntityModifiedAt(String entityModifiedAt) { + this.entityModifiedAt = entityModifiedAt; + } + + public String getAttributeId() { + return attributeId; + } + + public void setAttributeId(String attributeId) { + this.attributeId = attributeId; + } + + public String getInstanceId() { + return instanceId; + } + + public void setInstanceId(String instanceId) { + this.instanceId = instanceId; + } + + public Boolean getOverwriteOp() { + return overwriteOp; + } + + public void setOverwriteOp(Boolean overwriteOp) { + this.overwriteOp = overwriteOp; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TimeInterval.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TimeInterval.java new file mode 100644 index 0000000000000000000000000000000000000000..7e9b88bcfbed227379f43f3f323af1a447a7810d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TimeInterval.java @@ -0,0 +1,39 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.util.Date; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:23 + */ +public class TimeInterval { + + private Long start; + private Long stop; + + public TimeInterval(){ + + } + + public Long getStart() { + return start; + } + + public void setStart(Long start) { + this.start = start; + } + + public Long getStop() { + return stop; + } + + public void setStop(Long stop) { + this.stop = stop; + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TypedValue.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TypedValue.java new file mode 100644 index 0000000000000000000000000000000000000000..e95ff48bce474b5a07604588c4bfd6c83d753998 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/TypedValue.java @@ -0,0 +1,29 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +public class TypedValue { + private String type; + private Object value; + + + + + public TypedValue(String type, Object value) { + this.type = type; + this.value = value; + } + public String getType() { + return type; + } + public void setType(String type) { + this.type = type; + } + public Object getValue() { + return value; + } + public void setValue(Object value) { + this.value = value; + } + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Update.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Update.java new file mode 100644 index 0000000000000000000000000000000000000000..49933b9e0d432a80002645e73d8a9a709337fbf7 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/Update.java @@ -0,0 +1,25 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:23 + */ +public class Update extends BaseEntityModificationOperation { + + + + public Update(Map customFlags, List data, URI id, LDContext ldContext) { + super(customFlags, data, id, ldContext); + // TODO Auto-generated constructor stub + } + + public void finalize() throws Throwable { + + } + +} \ No newline at end of file 
diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/UpdateResult.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/UpdateResult.java new file mode 100644 index 0000000000000000000000000000000000000000..c05e338171b606a706f36c1e8639f7c4b5c34fd9 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/datatypes/UpdateResult.java @@ -0,0 +1,72 @@ +package eu.neclab.ngsildbroker.commons.datatypes; + +import com.fasterxml.jackson.databind.JsonNode; + +public class UpdateResult { + private JsonNode jsonToAppend; + private JsonNode updatedJsonFields;// = new ArrayList(); + private JsonNode finalNode; + private boolean status = false; + private byte[] json; + private byte[] jsonWithoutSysAttrs; + + public boolean getUpdateResult() { + return jsonToAppend.size() == updatedJsonFields.size(); + } + + public UpdateResult(JsonNode jsonToAppend,JsonNode appendedJsonFields) { + super(); + this.jsonToAppend = jsonToAppend; + this.updatedJsonFields=appendedJsonFields; + } + + + public JsonNode getFinalNode() { + return finalNode; + } + + public void setFinalNode(JsonNode finalNode) { + this.finalNode = finalNode; + } + + public JsonNode getJsonToAppend() { + return jsonToAppend; + } + + public void setJsonToAppend(JsonNode jsonToAppend) { + this.jsonToAppend = jsonToAppend; + } + + public JsonNode getAppendedJsonFields() { + return updatedJsonFields; + } + + public void setAppendedJsonFields(JsonNode updatedJsonFields) { + this.updatedJsonFields = updatedJsonFields; + } + + public boolean getStatus() { + return status; + } + + public void setStatus(boolean status) { + this.status = status; + } + + public byte[] getJson() { + return json; + } + + public void setJson(byte[] json) { + this.json = json; + } + + public byte[] getJsonWithoutSysAttrs() { + return jsonWithoutSysAttrs; + } + + public void setJsonWithoutSysAttrs(byte[] jsonWithoutSysAttrs) { + this.jsonWithoutSysAttrs = jsonWithoutSysAttrs; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/ErrorType.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/ErrorType.java new file mode 100644 index 0000000000000000000000000000000000000000..c81b2acb28ab8eada70c959fc1ce4c7c3c3917d1 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/ErrorType.java @@ -0,0 +1,59 @@ +package eu.neclab.ngsildbroker.commons.enums; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public enum ErrorType { + + None(200, "none", "none"), + InvalidRequest(406, "http://uri.etsi.org/ngsi-ld/errors/InvalidRequest", "Invalid request."), + BadRequestData(400, "http://uri.etsi.org/ngsi-ld/errors/BadRequestData", "Bad Request Data."), + TooComplexQuery(403, "http://uri.etsi.org/ngsi-ld/errors/TooComplexQuery", "Too complex query"), + TooManyResults(403, "http://uri.etsi.org/ngsi-ld/errors/TooManyResults ", "Too many results"), + NotFound(404, "http://uri.etsi.org/ngsi-ld/errors/ResourceNotFound", "Resource not found."), + + // ResourceNotFound(404,"Resource not found."), + // MethodNotAllowed(405,"Method not allowed"), + + AlreadyExists(409, "http://uri.etsi.org/ngsi-ld/errors/AlreadyExists", "Already exists."), + LenghtRequired(411, "HTTP request provided by a client does not define the “Content-Length” HTTP header", + "HTTP request provided by a client does not define the “Content-Length” HTTP header"), + RequestEntityTooLarge(413, "HTTP 
input data stream is too large i.e. too many bytes", + "HTTP input data stream is too large i.e. too many bytes"), + UnsupportedMediaType(415, "Unsupported Media type", "Unsupported Media type"), + OperationNotSupported(422, "http://uri.etsi.org/ngsi-ld/errors/OperationNotSupported", "Operation not supported."), + UnprocessableEntity(422, "Unprocessable Entity.", "Unprocessable Entity."), + + InternalError(500, "http://uri.etsi.org/ngsi-ld/errors/InternalError", "Internal error"), + KafkaWriteError(500, "http://uri.etsi.org/ngsi-ld/errors/InternalError", "Kafka write exception."), + MultiStatus(207, "Multi status result", "Multi status result"); + + private final int code; + private final String message; + private String errorType; + + private ErrorType(int code, String errorType, String message) { + this.code = code; + this.message = message; + this.errorType = errorType; + } + + public int getCode() { + return code; + } + + public String getErrorType() { + return errorType; + } + + public String getMessage() { + return message; + } + + public String toString() { + return "[" + code + " : " + message + " ]"; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/Format.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/Format.java new file mode 100644 index 0000000000000000000000000000000000000000..6a85a41f3c70a3388cc68b8b8e0fc0e9a8c41a22 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/Format.java @@ -0,0 +1,12 @@ +package eu.neclab.ngsildbroker.commons.enums; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public enum Format { + keyValues, + normalized +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/Geometry.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/Geometry.java new file mode 100644 index 0000000000000000000000000000000000000000..88f4a1c5fbd926b2b38c7a27ae586efc2ca8ce5d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/Geometry.java @@ -0,0 +1,12 @@ +package eu.neclab.ngsildbroker.commons.enums; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:22 + */ +public enum Geometry { + Point, + Polygon +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/TemporalRelation.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/TemporalRelation.java new file mode 100644 index 0000000000000000000000000000000000000000..eafcad1c9fdbcd0dfb8c89e574f686e87bdc92b6 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/TemporalRelation.java @@ -0,0 +1,13 @@ +package eu.neclab.ngsildbroker.commons.enums; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:23 + */ +public enum TemporalRelation { + before, + after, + between +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/TriggerReason.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/TriggerReason.java new file mode 100644 index 0000000000000000000000000000000000000000..bd355c7486350ebf0af0c333f894d949b75a8d8b --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/enums/TriggerReason.java @@ -0,0 +1,13 @@ +package 
eu.neclab.ngsildbroker.commons.enums; + + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:13:23 + */ +public enum TriggerReason { + updated, + newlyMatching, + noLongerMatching +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/AlreadyExistException.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/AlreadyExistException.java new file mode 100644 index 0000000000000000000000000000000000000000..7071cb51b47f43a1c131bbe5bbc0e8cff4613552 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/AlreadyExistException.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.exceptions; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +/** + * @version 1.0 + * @created 09-Jul-2018 + */ +public class AlreadyExistException extends ResponseException { + + private static final long serialVersionUID = 1L; + + /*public AlreadyExistException(String message) { + super(HttpStatus.CONFLICT, message); + }*/ + + public AlreadyExistException(ErrorType error) { + super(error); + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/BadRequestException.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/BadRequestException.java new file mode 100644 index 0000000000000000000000000000000000000000..ab222a7a7d74ab8a9cc12a6ae8be3438e1e6770a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/BadRequestException.java @@ -0,0 +1,18 @@ +package eu.neclab.ngsildbroker.commons.exceptions; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +/** + * @version 1.0 + * @created 09-Jul-2018 + */ +public class BadRequestException extends ResponseException{ + + private static final long serialVersionUID = 1L; + + + + public BadRequestException() { + super(ErrorType.BadRequestData); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/HttpErrorResponseException.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/HttpErrorResponseException.java new file mode 100644 index 0000000000000000000000000000000000000000..afefa677b58113464479e0fc0a2fa197809f7eda --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/HttpErrorResponseException.java @@ -0,0 +1,21 @@ + +package eu.neclab.ngsildbroker.commons.exceptions; + +import org.apache.http.client.HttpResponseException; + + +public class HttpErrorResponseException extends HttpResponseException { + private static final long serialVersionUID = -5656867439394559485L; + + /** + * Instantiate the exception. 
+ * + * @param statusCode + * the status code that lead to this error + * @param statusReason + * the reason in the status message + */ + public HttpErrorResponseException(int statusCode, String statusReason) { + super(statusCode, statusReason); + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/KafkaWriteException.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/KafkaWriteException.java new file mode 100644 index 0000000000000000000000000000000000000000..748f0da5e8ca5f59a621bd92887880a8b3891fa7 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/KafkaWriteException.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.exceptions; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +/** + * @version 1.0 + * @created 09-Jul-2018 + */ +public class KafkaWriteException extends ResponseException{ + + private static final long serialVersionUID = 1L; + + /*public KafkaWriteException(String message) { + super(HttpStatus.INTERNAL_SERVER_ERROR, message); + }*/ + + public KafkaWriteException(ErrorType error) { + super(error); + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/NotFoundException.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/NotFoundException.java new file mode 100644 index 0000000000000000000000000000000000000000..d72407a49f589d8059033d80b5a49caf190b6304 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/NotFoundException.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.exceptions; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +/** + * @version 1.0 + * @date 9-Jul-2018 + */ +public class NotFoundException extends ResponseException{ + private static final long serialVersionUID = 1L; + + public NotFoundException(ErrorType error) { + super(error); + + } + /*public NotFoundException(String message) { + super(HttpStatus.NOT_FOUND, message); + }*/ + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/ResponseException.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/ResponseException.java new file mode 100644 index 0000000000000000000000000000000000000000..4959f3f2beb55aeef1299b0d0f8323363b5de6eb --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/exceptions/ResponseException.java @@ -0,0 +1,59 @@ +package eu.neclab.ngsildbroker.commons.exceptions; + +import org.springframework.http.HttpStatus; + +import eu.neclab.ngsildbroker.commons.enums.ErrorType; + +/** + * @version 1.0 + * @created 09-Jul-2018 + */ +public class ResponseException extends Exception{ + private static final long serialVersionUID = 1L; + + private HttpStatus httpStatus; + private ErrorType error; + + public ResponseException(String message) { + super(message); + } + + /*public ResponseException(int code,String message) { + super(message); + this.httpStatus=HttpStatus.valueOf(code); + }*/ + + public ResponseException(ErrorType error) { + super(error.getMessage()); + this.error=error; + this.httpStatus=HttpStatus.valueOf(error.getCode()); + } + + public ResponseException(ErrorType error,String errorMessage) { + super(errorMessage); + this.error=error; + this.httpStatus=HttpStatus.valueOf(error.getCode()); + } + + /*public ResponseException(HttpStatus httpStatus,String message) { + super(message); + 
this.httpStatus=httpStatus; + }*/ + + public HttpStatus getHttpStatus() { + return httpStatus; + } + + public ErrorType getError() { + return error; + } + + @Override + public String toString() { + return super.getMessage(); + } + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/AppendHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/AppendHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..eb065bcf619a9b02ce56fef614d5391ff706aff3 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/AppendHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.Append; +import eu.neclab.ngsildbroker.commons.datatypes.AppendResult; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:39:57 + */ +public interface AppendHandler { + + /** + * + * @param append + */ + public AppendResult append(Append append); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CSourceNotificationHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CSourceNotificationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..6eb15c6ad3e892931fe64c356e51a64caad93ca2 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CSourceNotificationHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceNotification; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:08 + */ +public interface CSourceNotificationHandler { + + /** + * + * @param regInfo + */ + public void notify(CSourceNotification notification, Subscription sub); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CSourceQueryHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CSourceQueryHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..76c6a2feae7f390df1ae2706529799f69e162d42 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CSourceQueryHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceQueryResult; +import eu.neclab.ngsildbroker.commons.datatypes.Query; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:02 + */ +public interface CSourceQueryHandler { + + /** + * + * @param query + */ + public CSourceQueryResult query(Query query); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/ConfigurationHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/ConfigurationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..e49b94dda38b1ddf3e61f1c0d18fdd826cbb0bb2 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/ConfigurationHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import java.util.Map; + +/** + * @author hebgen + * @version 1.0 + * @created 
11-Jun-2018 11:40:03 + */ +public interface ConfigurationHandler { + + /** + * + * @param bundleId + * @param config + */ + public void configure(String bundleId, Map config); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/ContextRegistryInterface.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/ContextRegistryInterface.java new file mode 100644 index 0000000000000000000000000000000000000000..66c4f381833e81e4e09932a27ea60da995aea80a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/ContextRegistryInterface.java @@ -0,0 +1,66 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import java.net.URI; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceQueryResult; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Query; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:06 + */ +public interface ContextRegistryInterface extends SubscriptionManager, CSourceQueryHandler { + + /** + * + * @param id + */ + public void deleteCSourceRegistration(URI id); + + /** + * + * @param query + */ + public CSourceQueryResult query(Query query); + + /** + * + * @param id + */ + public Subscription querySubscription(URI id); + + /** + * + * @param source + */ + public URI registerCSource(CSourceRegistration source); + + /** + * + * @param subscription + */ + public URI subscribe(Subscription subscription); + + /** + * + * @param id + */ + public void unsubscribe(URI id); + + /** + * + * @param id + * @param update + */ + public void updateCSourceRegistry(URI id, CSourceRegistration update); + + /** + * + * @param subscription + */ + public Subscription updateSubscription(Subscription subscription); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CreateHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CreateHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..6fb00fc1b6f994d5ea8133143fcaa7b01327727e --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/CreateHandler.java @@ -0,0 +1,20 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.Create; +import eu.neclab.ngsildbroker.commons.datatypes.CreateResult; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:39:59 + */ +public interface CreateHandler { + + /** + * + * @param create + + */ + public CreateResult create(Create create); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/DeleteHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/DeleteHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..1ca2ef1cafffa99cf6a7dc3a06b25ff2eaa21913 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/DeleteHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.Delete; +import eu.neclab.ngsildbroker.commons.datatypes.DeleteResult; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:01 + */ +public 
interface DeleteHandler { + + /** + * + * @param delete + */ + public DeleteResult delete(Delete delete); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/EntityHandlerInterface.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/EntityHandlerInterface.java new file mode 100644 index 0000000000000000000000000000000000000000..4229b118f467a5da6e137c10d834fd26b693a031 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/EntityHandlerInterface.java @@ -0,0 +1,12 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:04 + */ +public interface EntityHandlerInterface extends UpdateHandler, DeleteHandler, AppendHandler, CreateHandler { + + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/EntitySubscriptionHandlerInterface.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/EntitySubscriptionHandlerInterface.java new file mode 100644 index 0000000000000000000000000000000000000000..534ed16423073873259460cd3aff9e1a6db1dbfa --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/EntitySubscriptionHandlerInterface.java @@ -0,0 +1,55 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import java.net.URI; +import java.util.List; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:07 + */ +public interface EntitySubscriptionHandlerInterface extends SubscriptionManager, InternalNotificationHandler, CSourceNotificationHandler { + + + + /** + * + * @param entities + */ + public void newData(List entities); + + /** + * + * @param regInfo + */ + public void newSource(CSourceRegistration regInfo); + + /** + * + * @param id + */ + public Subscription querySubscription(URI id); + + /** + * + * @param subscription + */ + public URI subscribe(Subscription subscription); + + /** + * + * @param id + */ + public void unsubscribe(URI id); + + /** + * + * @param subscription + */ + public Subscription updateSubscription(Subscription subscription); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/HistoryInterface.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/HistoryInterface.java new file mode 100644 index 0000000000000000000000000000000000000000..f5b6500216567df80df5b417a51a903a8d96bbe2 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/HistoryInterface.java @@ -0,0 +1,51 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.Append; +import eu.neclab.ngsildbroker.commons.datatypes.AppendResult; +import eu.neclab.ngsildbroker.commons.datatypes.Create; +import eu.neclab.ngsildbroker.commons.datatypes.CreateResult; +import eu.neclab.ngsildbroker.commons.datatypes.Delete; +import eu.neclab.ngsildbroker.commons.datatypes.DeleteResult; +import eu.neclab.ngsildbroker.commons.datatypes.Query; +import eu.neclab.ngsildbroker.commons.datatypes.QueryResult; +import eu.neclab.ngsildbroker.commons.datatypes.Update; +import 
eu.neclab.ngsildbroker.commons.datatypes.UpdateResult; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:06 + */ +public interface HistoryInterface extends StorageInterface { + + /** + * + * @param append + */ + public AppendResult append(Append append); + + /** + * + * @param create + */ + public CreateResult create(Create create); + + /** + * + * @param delete + */ + public DeleteResult delete(Delete delete); + + /** + * + * @param query + */ + public QueryResult query(Query query); + + /** + * + * @param update + */ + public UpdateResult update(Update update); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/InternalNotificationHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/InternalNotificationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..2225fd6f17e8b5ebba7df2da3d2eb92cf78b7e38 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/InternalNotificationHandler.java @@ -0,0 +1,20 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import java.util.List; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:08 + */ +public interface InternalNotificationHandler { + + /** + * + * @param entities + */ + public void newData(List entities); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/NotificationHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/NotificationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..87619766f6c3ddfa79df264d3e2248e9abd972cd --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/NotificationHandler.java @@ -0,0 +1,23 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +import eu.neclab.ngsildbroker.commons.datatypes.Notification; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:09 + */ +public interface NotificationHandler { + + /** + * + * @param notification + * @param ldContext + */ + public void notify(Notification notification, URI callback, String acceptHeader, String subId, List context, int throttling, Map clientSettings); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/QueryHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/QueryHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..d62f98e8e9f0ad3f2fd7f8f343f24abace689266 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/QueryHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.Query; +import eu.neclab.ngsildbroker.commons.datatypes.QueryResult; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:02 + */ +public interface QueryHandler { + + /** + * + * @param query + */ + public QueryResult query(Query query); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/StorageInterface.java 
b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/StorageInterface.java new file mode 100644 index 0000000000000000000000000000000000000000..0ea91a9212a1b3ce79af981907a1214a033e7c75 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/StorageInterface.java @@ -0,0 +1,13 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:05 + */ +public interface StorageInterface extends UpdateHandler, DeleteHandler, AppendHandler, QueryHandler, CreateHandler { + + + + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/SubscriptionManager.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/SubscriptionManager.java new file mode 100644 index 0000000000000000000000000000000000000000..1cafd8815145345c4366ed8254730356ebc476a0 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/SubscriptionManager.java @@ -0,0 +1,45 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import java.net.URI; +import java.util.List; + +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:00 + */ +public interface SubscriptionManager { + + + + /** + * + * @param subRequest + * @throws AlreadyExistsException + */ + public URI subscribe(SubscriptionRequest subRequest) throws ResponseException; + + /** + * + * @param id + */ + public void unsubscribe(URI id) throws ResponseException; + + /** + * + * @param subscription + */ + public Subscription updateSubscription(SubscriptionRequest subscription) throws ResponseException; + + public List getAllSubscriptions(int limit); + + public Subscription getSubscription(String subscriptionId) throws ResponseException; + + public void remoteNotify(String id, Notification notification); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/UpdateHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/UpdateHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..519370199404075d0382a625c29ac6a61ec22d4d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/interfaces/UpdateHandler.java @@ -0,0 +1,19 @@ +package eu.neclab.ngsildbroker.commons.interfaces; + +import eu.neclab.ngsildbroker.commons.datatypes.Update; +import eu.neclab.ngsildbroker.commons.datatypes.UpdateResult; + +/** + * @author hebgen + * @version 1.0 + * @created 11-Jun-2018 11:40:01 + */ +public interface UpdateHandler { + + /** + * + * @param update + */ + public UpdateResult update(Update update); + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/AtContext.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/AtContext.java new file mode 100644 index 0000000000000000000000000000000000000000..d7f9701f5de20955041a6e912273a3228f67a5d9 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/AtContext.java @@ -0,0 +1,79 @@ +package 
eu.neclab.ngsildbroker.commons.ldcontext; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import javax.annotation.PostConstruct; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.messaging.Message; +import org.springframework.stereotype.Component; +import com.google.gson.Gson; + +import eu.neclab.ngsildbroker.commons.constants.KafkaConstants; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; + + +@Component +public class AtContext { + + + private HashMap<String, List<Object>> id2Contextes = new HashMap<String, List<Object>>(); + private Gson gson = new Gson(); + + + @Autowired + KafkaOps kafkaOps; + + @Autowired + AtContextProducerChannel producerChannel; + + + @SuppressWarnings("unchecked") + @PostConstruct + private void loadMap() { + Map<String, byte[]> contextes = kafkaOps.pullFromKafka(KafkaConstants.ATCONTEXT_TOPIC); + for(Entry<String, byte[]> entry: contextes.entrySet()) { + this.id2Contextes.put(entry.getKey(), gson.fromJson(new String(entry.getValue()), List.class)); + } + } + + public void addContext(String id, List<Object> context) throws ResponseException { + + this.id2Contextes.put(id, context); + saveContext(id); + } + + public Map<String, byte[]> getAllContextes() { + return kafkaOps.pullFromKafka(KafkaConstants.ATCONTEXT_TOPIC); + } + @SuppressWarnings("unchecked") + public List<Object> getContextes(String id){ +// byte[] kafkaBytes = kafkaOps.pullFromKafka(KafkaConstants.ATCONTEXT_TOPIC).get(id); +// if(kafkaBytes == null || kafkaBytes == AppConstants.NULL_BYTES) { +// return new HashMap(); +// }else { +// return gson.fromJson(new String(kafkaBytes), Map.class); +// } + + return this.id2Contextes.get(id); + } + + + + private void saveContext(String id) throws ResponseException { + kafkaOps.pushToKafka(producerChannel.atContextWriteChannel(), id.getBytes(), gson.toJson(id2Contextes.get(id)).getBytes()); + + } + + @KafkaListener(topics = "ATCONTEXT", groupId = "atCon") + public void listenContext(Message<byte[]> message) { + List<Object> context = gson.fromJson(new String(message.getPayload()), List.class); + String key = kafkaOps.getMessageKey(message); + id2Contextes.put(key, context); + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/AtContextProducerChannel.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/AtContextProducerChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..c5eb05dec33bc0d212312976189d46b6674bf462 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/AtContextProducerChannel.java @@ -0,0 +1,17 @@ +package eu.neclab.ngsildbroker.commons.ldcontext; + +import org.springframework.cloud.stream.annotation.Output; +import org.springframework.messaging.MessageChannel; + + + +public interface AtContextProducerChannel { + + + public String atContextWriteChannel="ATCONTEXT"; + + + @Output(atContextWriteChannel) + MessageChannel atContextWriteChannel(); + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/CompactedJson.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/CompactedJson.java new file mode 100644 index 0000000000000000000000000000000000000000..37a79e872498593d2daf49e53b13678c4c02336e --- /dev/null +++ 
b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/CompactedJson.java @@ -0,0 +1,40 @@ +package eu.neclab.ngsildbroker.commons.ldcontext; + +public class CompactedJson { + + private String compacted; + private String compactedWithContext; + private String contextUrl; + + + + + public String getCompacted() { + return compacted; + } + + public void setCompacted(String compacted) { + this.compacted = compacted; + } + + public String getCompactedWithContext() { + return compactedWithContext; + } + + public void setCompactedWithContext(String compactedWithContext) { + this.compactedWithContext = compactedWithContext; + } + + public String getContextUrl() { + return contextUrl; + } + + public void setContextUrl(String contextUrl) { + this.contextUrl = contextUrl; + } + + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/ContextResolverBasic.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/ContextResolverBasic.java new file mode 100644 index 0000000000000000000000000000000000000000..b0b9522180028ef8b46046c367c0d38ee1736a2a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ldcontext/ContextResolverBasic.java @@ -0,0 +1,1256 @@ +package eu.neclab.ngsildbroker.commons.ldcontext; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.time.Instant; +import java.time.temporal.TemporalAccessor; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.annotation.PostConstruct; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.core.JsonParseException; +import com.github.jsonldjava.core.JsonLdError; +import com.github.jsonldjava.core.JsonLdOptions; +import com.github.jsonldjava.core.JsonLdProcessor; +import com.github.jsonldjava.core.RDFDataset; +import com.github.jsonldjava.core.RDFDatasetUtils; +import com.github.jsonldjava.utils.JsonUtils; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.EndPoint; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.GeoRelation; +import eu.neclab.ngsildbroker.commons.datatypes.LDGeoQuery; +import eu.neclab.ngsildbroker.commons.datatypes.LDQuery; +import eu.neclab.ngsildbroker.commons.datatypes.NotificationParam; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.enums.Format; +import eu.neclab.ngsildbroker.commons.enums.Geometry; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; 
+import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +@Component +public class ContextResolverBasic { + private final static Logger logger = LogManager.getLogger(ContextResolverBasic.class); + private URI CORE_CONTEXT_URL; + @Value("${context.coreurl:https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld}") + private String CORE_CONTEXT_URL_STR; + + private String USED_CORE_CONTEXT_URL_STR; + // private URI DEFAULT_CONTEXT_URL; + + @Autowired + KafkaOps kafkaOps; + + @Autowired + AtContextProducerChannel producerChannel; + + private JsonLdOptions defaultOptions = new JsonLdOptions(); + + @Value("${atcontext.baseurl:http://localhost:9090/ngsi-ld/contextes/}") + private String AT_CONTEXT_BASE_URL; + @Value("${selfhostcorecontext:http://localhost:9090/corecontext}") + private String SELF_HOST_CORE_CONTEXT_URL; + private HttpUtils httpUtils = HttpUtils.getInstance(this); + private Map CORE_CONTEXT; + // private Map DEFAULT_CONTEXT; + private Map BASE_CONTEXT = new HashMap(); + Pattern attributeChecker; + Pattern subscriptionParser; + private static final String IS_FULL_VALID = "ajksd7868"; + + @PostConstruct + private void setup() { + try { + CORE_CONTEXT_URL = new URI(CORE_CONTEXT_URL_STR); + String json = httpUtils.doGet(CORE_CONTEXT_URL); + CORE_CONTEXT = (Map) ((Map) JsonUtils.fromString(json)).get("@context"); + BASE_CONTEXT.putAll(CORE_CONTEXT); + USED_CORE_CONTEXT_URL_STR = CORE_CONTEXT_URL_STR; + } catch (URISyntaxException e) { + // left empty intentionally + // controlled uri + throw new AssertionError( + CORE_CONTEXT_URL + " is not a valid uri. Aborting! core context has to be available"); + } catch (IOException e) { + // core context not reachable + try { + CORE_CONTEXT_URL = new URI(SELF_HOST_CORE_CONTEXT_URL); + String json = httpUtils.doGet(CORE_CONTEXT_URL); + CORE_CONTEXT = (Map) ((Map) JsonUtils.fromString(json)).get("@context"); + BASE_CONTEXT.putAll(CORE_CONTEXT); + USED_CORE_CONTEXT_URL_STR = SELF_HOST_CORE_CONTEXT_URL; + } catch (URISyntaxException e1) { + // left empty intentionally + // controlled uri + throw new AssertionError(SELF_HOST_CORE_CONTEXT_URL + + " is not a valid uri. Aborting! core context has to be available"); + } catch (IOException e1) { + throw new AssertionError( + "Neither the default core context is reachable nore the internal webserver. Aborting! 
core context has to be available"); + } + } + } + + public static void main(String[] args) throws Exception { + ContextResolverBasic bla = new ContextResolverBasic(); + ArrayList contextLinks = new ArrayList(); + String body = "{ \"id\": \"urn:ngsi-ld:Building:store000000001\", \"type\": \"Building\", \"category\": { \"type\": \"Property\", \"value\": [\"commercial\"] }, \"address\": { \"type\": \"Property\", \"value\": { \"streetAddress\": \"Bornholmer Straße 65\", \"addressRegion\": \"Berlin\", \"addressLocality\": \"Prenzlauer Berg\", \"postalCode\": \"10439\" }, \"verified\": { \"type\": \"Property\", \"value\": true } }, \"location\": { \"type\": \"GeoProperty\", \"value\": { \"type\": \"Point\", \"coordinates\": [13.3986, 52.5547] } }, \"name\": { \"type\": \"Property\", \"value\": \"Bösebrücke Einkauf\" }, \"@context\": [ \"https://fiware.github.io/data-models/context.jsonld\", \"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\" ] }"; + /**/ + } + + public ContextResolverBasic(String atContextBaseUrl) { + this(); + if (atContextBaseUrl != null) { + this.AT_CONTEXT_BASE_URL = atContextBaseUrl; + } + } + + public ContextResolverBasic() { + super(); + StringBuilder regex = new StringBuilder(); + regex.append(NGSIConstants.NGSI_LD_FORBIDDEN_KEY_CHARS_REGEX); + for (String payloadItem : NGSIConstants.NGSI_LD_PAYLOAD_KEYS) { + regex.append("|(" + payloadItem.replace("/", "\\/").replace(".", "\\.") + ")"); + } + attributeChecker = Pattern.compile(regex.toString()); + regex = new StringBuilder(); + regex.append(NGSIConstants.NGSI_LD_FORBIDDEN_KEY_CHARS_REGEX); + for (String payloadItem : NGSIConstants.NGSI_LD_SUBSCRIPTON_PAYLOAD_KEYS) { + regex.append("|(" + payloadItem.replace("/", "\\/").replace(".", "\\.") + ")"); + } + subscriptionParser = Pattern.compile(regex.toString()); + + } + + public String expand(String body, List contextLinks, boolean check, int endPoint) throws ResponseException { + try { + Object obj = JsonUtils.fromString(body); + if (obj instanceof Map) { + return expand((Map) obj, contextLinks, check, endPoint); + } + if (obj instanceof List) { + List list = (List) obj; + if (list.isEmpty()) { + throw new ResponseException(ErrorType.InvalidRequest); + } + StringBuilder result = new StringBuilder("["); + for (Object listObj : list) { + result.append(expand((Map) listObj, contextLinks, check, endPoint)); + result.append(","); + } + result.setCharAt(result.length() - 1, ']'); + return result.toString(); + } + throw new ResponseException(ErrorType.InvalidRequest); + } catch (IOException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } + } + + private Map> expand(Map json, List contextLinks) { + Object tempCtx = json.get(NGSIConstants.JSON_LD_CONTEXT); + List context; + if (tempCtx == null) { + context = new ArrayList(); + } else if (tempCtx instanceof List) { + context = (List) tempCtx; + } else { + context = new ArrayList(); + context.add(tempCtx); + } + + if (contextLinks != null && !contextLinks.isEmpty()) { + context.addAll(contextLinks); + } + ArrayList usedContext = new ArrayList(); + + usedContext.addAll(context); + usedContext.remove(CORE_CONTEXT_URL_STR); + usedContext.add(BASE_CONTEXT); + + json.put(NGSIConstants.JSON_LD_CONTEXT, usedContext); + + Map> result = new HashMap>(); + result.put(1, JsonLdProcessor.expand(json)); + result.put(2, usedContext); + return result; + + } + + public String expand(Map json, List contextLinks, boolean check, int endPoint) + throws ResponseException { + try { + // if(! 
+ Map> expanded = expand(json, contextLinks); + if (check) { + preFlightCheck(expanded.get(1), expanded.get(2), true, endPoint, false); + } + // ) { + // throw new ResponseException(ErrorType.BadRequestData,"Entity without an + // attribute is not allowed"); + // } +// protectGeoProps(expanded, usedContext); +// protectLocationFromSubs(expanded, usedContext); + if (expanded.get(1).isEmpty()) { + return ""; + } + return JsonUtils.toPrettyString(expanded.get(1).get(0)); + } catch (IOException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest, e.getLocalizedMessage()); + } + + } + + private boolean preFlightCheck(List expanded, List usedContext, boolean root, int calledEndpoint, + boolean customKey) throws JsonGenerationException, ResponseException, IOException { + boolean hasAttributes = false; + for (Object entry : expanded) { + if (entry instanceof Map) { + hasAttributes = preFlightCheck((Map) entry, usedContext, root, calledEndpoint, + customKey) || hasAttributes; + } else if (entry instanceof List) { + hasAttributes = preFlightCheck((List) entry, usedContext, root, calledEndpoint, customKey) + || hasAttributes; + } else { + // don't care for now i think + } + } + return hasAttributes; + } + + private boolean preFlightCheck(Map objMap, List usedContext, boolean root, + int calledEndpoint, boolean customKey) throws ResponseException, JsonGenerationException, IOException { + + Object value = null; + + boolean hasValue = false; + boolean hasObject = false; + boolean hasAtValue = false; + boolean hasAttributes = false; + boolean isProperty = false; + boolean isRelationship = false; + boolean isDatetime = false; + boolean isGeoProperty = false; + int keyType; + for (Entry mapEntry : objMap.entrySet()) { + String key = mapEntry.getKey(); + Object mapValue = mapEntry.getValue(); + keyType = checkKey(key, attributeChecker); + // (@id)|(@type)|(@context)|(https://uri.etsi.org/ngsi-ld/default-context/)|(https://uri.etsi.org/ngsi-ld/hasValue)|(https://uri.etsi.org/ngsi-ld/hasObject)|(https://uri.etsi.org/ngsi-ld/location)|(https://uri.etsi.org/ngsi-ld/createdAt)|(https://uri.etsi.org/ngsi-ld/modifiedAt)|(https://uri.etsi.org/ngsi-ld/observedAt)|(https://uri.etsi.org/ngsi-ld/observationSpace)|(https://uri.etsi.org/ngsi-ld/operationSpace)|(https://uri.etsi.org/ngsi-ld/attributes)|(https://uri.etsi.org/ngsi-ld/information)|(https://uri.etsi.org/ngsi-ld/instanceId)|(https://uri.etsi.org/ngsi-ld/coordinates)|(https://uri.etsi.org/ngsi-ld/idPattern)|(https://uri.etsi.org/ngsi-ld/entities)|(https://uri.etsi.org/ngsi-ld/geometry)|(https://uri.etsi.org/ngsi-ld/geoQ)|(https://uri.etsi.org/ngsi-ld/accept)|(https://uri.etsi.org/ngsi-ld/uri)|(https://uri.etsi.org/ngsi-ld/endpoint)|(https://uri.etsi.org/ngsi-ld/format)|(https://uri.etsi.org/ngsi-ld/notification)|(https://uri.etsi.org/ngsi-ld/q)|(https://uri.etsi.org/ngsi-ld/watchedAttributes)|(https://uri.etsi.org/ngsi-ld/name)|(https://uri.etsi.org/ngsi-ld/throttling)|(https://uri.etsi.org/ngsi-ld/timeInterval)|(https://uri.etsi.org/ngsi-ld/expires)|(https://uri.etsi.org/ngsi-ld/status)|(https://uri.etsi.org/ngsi-ld/description)|(https://uri.etsi.org/ngsi-ld/georel)|(https://uri.etsi.org/ngsi-ld/timestamp)|(https://uri.etsi.org/ngsi-ld/start)|(https://uri.etsi.org/ngsi-ld/end)|(https://uri.etsi.org/ngsi-ld/subscriptionId)|(https://uri.etsi.org/ngsi-ld/notifiedAt)|(https://uri.etsi.org/ngsi-ld/data)|(https://uri.etsi.org/ngsi-ld/internal)|(https://uri.etsi.org/ngsi-ld/lastNotification)|(https://uri.etsi.org/ngsi-ld/lastFailure + 
// )|(https://uri.etsi.org/ngsi-ld/lastSuccess)|(https://uri.etsi.org/ngsi-ld/timesSent)|([\<\"\'\=\;\(\)\>\?\*]) + if (keyType == 1) { + throw new ResponseException(ErrorType.BadRequestData, + "Forbidden characters in JSON key. Forbidden Characters are " + + NGSIConstants.NGSI_LD_FORBIDDEN_KEY_CHARS); + } else if (keyType == -1 || keyType == 5 || keyType == 9) { + if (keyType == 9) { + if (protectRegistrationLocationEntry(mapValue, mapEntry, usedContext)) { + continue; + } + } + if (mapValue instanceof Map) { + hasAttributes = preFlightCheck((Map) mapValue, usedContext, false, calledEndpoint, + true) || hasAttributes; + } else if (mapValue instanceof List) { + hasAttributes = preFlightCheck((List) mapValue, usedContext, false, calledEndpoint, true) + || hasAttributes; + } + } else if (keyType == 2) { + // ID + validateUri((String) mapValue); + hasValue = true; + } else if (keyType == 3) { + // TYPE + String type = null; + if (mapValue instanceof List) { + type = validateUri((String) ((List) mapValue).get(0)); + } else if (mapValue instanceof String) { + type = validateUri((String) mapValue); + } + if (type == null) { + continue; + } + switch (type) { + case NGSIConstants.NGSI_LD_GEOPROPERTY: + isGeoProperty = true; + break; + case NGSIConstants.NGSI_LD_PROPERTY: + isProperty = true; + break; + case NGSIConstants.NGSI_LD_RELATIONSHIP: + isRelationship = true; + break; + case NGSIConstants.NGSI_LD_DATE_TIME: + isDatetime = true; + break; + default: + break; + } + } else if (keyType == 6) { + value = checkHasValue(mapValue); + hasValue = true; + } else if (keyType == 7) { + checkHasObject(mapValue); + hasObject = true; + } else if (keyType == 8) { + hasAtValue = true; + } + } + if ((calledEndpoint == AppConstants.ENTITIES_URL_ID || calledEndpoint == AppConstants.HISTORY_URL_ID) + && (isProperty && !hasValue)) { + throw new ResponseException(ErrorType.BadRequestData, "You can't have properties without a value"); + } + if ((calledEndpoint == AppConstants.ENTITIES_URL_ID || calledEndpoint == AppConstants.HISTORY_URL_ID) + && (isRelationship && !hasObject)) { + throw new ResponseException(ErrorType.BadRequestData, "You can't have relationships without an object"); + } + if ((calledEndpoint == AppConstants.ENTITIES_URL_ID || calledEndpoint == AppConstants.HISTORY_URL_ID) + && (isDatetime && !hasAtValue)) { + throw new ResponseException(ErrorType.BadRequestData, "You can't have an empty datetime entry"); + } + + if ((calledEndpoint == AppConstants.ENTITIES_URL_ID || calledEndpoint == AppConstants.HISTORY_URL_ID) + && (customKey && !((isProperty && hasValue) || (isRelationship && hasObject) + || (isDatetime && hasAtValue) || (isGeoProperty && hasValue)))) { + throw new ResponseException(ErrorType.BadRequestData, "Unknown entry"); + } + if (isGeoProperty) { + protectGeoProp(objMap, value, usedContext); + } + return hasAttributes; + } + + private void checkHasObject(Object mapValue) throws ResponseException { + if (mapValue == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + if (mapValue instanceof List) { + List tempList = (List) mapValue; + if(tempList.size() != 1) { + throw new ResponseException(ErrorType.BadRequestData, "Only one entry per relationship is allowed"); + } + } + + + } + + public Subscription expandSubscription(String body, List contextLinks) throws ResponseException { + Subscription subscription = new Subscription(); + + Map> expanded; + try { + expanded = expand((Map) JsonUtils.fromString(body), contextLinks); + } catch (Exception e) { + throw new 
ResponseException(ErrorType.BadRequestData, "Failed to parse document. JSON is invalid"); + } + Map rawSub = (Map) expanded.get(1).get(0); + Object value = null; + boolean hasEntities = false; + boolean hasWatchedAttributes = false; + boolean hasNotificaition = false; + + int keyType; + for (Entry mapEntry : rawSub.entrySet()) { + String key = mapEntry.getKey(); + Object mapValue = mapEntry.getValue(); + keyType = checkKey(key, subscriptionParser); + /* + * // { JSON_LD_ID, JSON_LD_TYPE, JSON_LD_CONTEXT, NGSI_LD_ENTITIES, + * NGSI_LD_ID_PATTERN, NGSI_LD_GEO_QUERY, NGSI_LD_NOTIFICATION, + * NGSI_LD_ATTRIBUTES, NGSI_LD_ENDPOINT, NGSI_LD_ACCEPT, NGSI_LD_URI, + * NGSI_LD_FORMAT, NGSI_LD_QUERY, NGSI_LD_WATCHED_ATTRIBUTES, + * NGSI_LD_TIMES_SEND, NGSI_LD_THROTTLING, NGSI_LD_TIME_INTERVAL, + * NGSI_LD_TIMESTAMP_END, NGSI_LD_TIMESTAMP_START } + */ + if (keyType == 1) { + throw new ResponseException(ErrorType.BadRequestData, + "Forbidden characters in JSON key. Forbidden Characters are " + + NGSIConstants.NGSI_LD_FORBIDDEN_KEY_CHARS); + } else if (keyType == -1) { + throw new ResponseException(ErrorType.BadRequestData, "Unkown entry for subscription"); + } else if (keyType == 2) { + // ID + try { + subscription.setId(new URI(validateUri((String) mapValue))); + } catch (URISyntaxException e) { + // Left empty intentionally is already checked + } + } else if (keyType == 3) { + // TYPE + String type = null; + if (mapValue instanceof List) { + type = validateUri((String) ((List) mapValue).get(0)); + } else if (mapValue instanceof String) { + type = validateUri((String) mapValue); + } + if (type == null || !type.equals(NGSIConstants.NGSI_LD_SUBSCRIPTION)) { + throw new ResponseException(ErrorType.BadRequestData, "No type or type is not Subscription"); + } + subscription.setType(type); + } else if (keyType == 5) { + // Entities + List entities = new ArrayList(); + List> list = (List>) mapValue; + boolean hasType; + for (Map entry : list) { + EntityInfo entityInfo = new EntityInfo(); + hasType = false; + for (Entry entitiesEntry : entry.entrySet()) { + switch (entitiesEntry.getKey()) { + case NGSIConstants.JSON_LD_ID: + try { + entityInfo.setId(new URI(validateUri((String) entitiesEntry.getValue()))); + } catch (URISyntaxException e) { + // Left empty intentionally is already checked + } + break; + case NGSIConstants.JSON_LD_TYPE: + hasType = true; + entityInfo.setType(validateUri((String) ((List) entitiesEntry.getValue()).get(0))); + break; + case NGSIConstants.NGSI_LD_ID_PATTERN: + entityInfo.setIdPattern( + (String) ((Map) ((List) entitiesEntry.getValue()).get(0)) + .get(NGSIConstants.JSON_LD_VALUE)); + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Unknown entry for entities"); + } + } + if (!hasType) { + throw new ResponseException(ErrorType.BadRequestData, "Entities entry needs type"); + } + hasEntities = true; + entities.add(entityInfo); + } + subscription.setEntities(entities); + } else if (keyType == 7) { + try { + LDGeoQuery ldGeoQuery = getGeoQuery((Map) ((List) mapValue).get(0)); + subscription.setLdGeoQuery(ldGeoQuery); + } catch (Exception e) { + logger.error(e); + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse geoQ"); + } + // geoQ + + } else if (keyType == 8) { + // NGSI_LD_NOTIFICATION + try { + NotificationParam notification = getNotificationParam( + (Map) ((List) mapValue).get(0)); + subscription.setNotification(notification); + hasNotificaition = true; + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, 
"Failed to parse notification parameter.\n" + e.getMessage()); + } + } else if (keyType == 14) { + // NGSI_LD_QUERY + + try { + subscription.setLdQuery( + (String) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse geoQ"); + } + } else if (keyType == 15) { + // NGSI_LD_WATCHED_ATTRIBUTES + try { + subscription.setAttributeNames(getAttribs((List>) mapValue)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, + "Failed to parse watched attributes " + mapValue); + } + } else if (keyType == 17) { + // THROTTELING + try { + subscription.setThrottling( + (Integer) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse throtteling"); + } + } else if (keyType == 18) { + // TIMEINTERVALL + try { + subscription.setTimeInterval( + (Integer) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse timeinterval"); + } + } else if (keyType == 19) { + // EXPIRES + try { + subscription.setExpires(SerializationTools.date2Long( + (String) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE))); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse expires"); + } + } else if (keyType == 20) { + // STATUS + try { + subscription.setStatus( + (String) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse status"); + } + } else if (keyType == 21) { + // DESCRIPTION + try { + subscription.setDescription( + (String) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse status"); + } + } else if (keyType == 22) { + // isActive + try { + subscription.setActive( + (Boolean) ((List>) mapValue).get(0).get(NGSIConstants.JSON_LD_VALUE)); + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse active state"); + } + } + + } + + if (!hasEntities && !hasWatchedAttributes) { + throw new ResponseException(ErrorType.BadRequestData, "You have to specify watched attributes or entities"); + } + if (!hasNotificaition) { + throw new ResponseException(ErrorType.BadRequestData, "You have to specify notification"); + } + + return subscription; + } + + private NotificationParam getNotificationParam(Map map) throws Exception { + // Default accept + String accept = AppConstants.NGB_APPLICATION_JSONLD; + Format format = Format.normalized; + List watchedAttribs = new ArrayList(); + String mqttVersion= null; + Integer qos = null; + NotificationParam notifyParam = new NotificationParam(); + MapnotifierInfo = new HashMap(); + for (Entry entry : map.entrySet()) { + switch (entry.getKey()) { + case NGSIConstants.NGSI_LD_ATTRIBUTES: + watchedAttribs = getAttribs((List>) entry.getValue()); + notifyParam.setAttributeNames(watchedAttribs); + break; + case NGSIConstants.NGSI_LD_ENDPOINT: + EndPoint endPoint = new EndPoint(); + for (Entry endPointEntry : ((List>) entry.getValue()).get(0) + .entrySet()) { + switch (endPointEntry.getKey()) { + case NGSIConstants.NGSI_LD_ACCEPT: + accept = ((List>) endPointEntry.getValue()).get(0) + .get(NGSIConstants.JSON_LD_VALUE); + break; + case NGSIConstants.NGSI_LD_URI: + 
URI endPointURI = validateSubEndpoint(((List>) endPointEntry.getValue()) + .get(0).get(NGSIConstants.JSON_LD_VALUE)); + endPoint.setUri(endPointURI); + break; + + case NGSIConstants.NGSI_LD_NOTIFIERINFO: + + for (Entry endPointNotifier : ((List>) endPointEntry + .getValue()).get(0).entrySet()) { + switch (endPointNotifier.getKey()) { + case NGSIConstants.NGSI_LD_MQTT_VERSION: + mqttVersion = validateSubNotifierInfoMqttVersion( + ((List>) endPointNotifier.getValue()).get(0) + .get(NGSIConstants.JSON_LD_VALUE)); + notifierInfo.put(NGSIConstants.MQTT_VERSION, mqttVersion); + break; + case NGSIConstants.NGSI_LD_MQTT_QOS: + qos = validateSubNotifierInfoQos( + ((List>) endPointNotifier.getValue()).get(0) + .get(NGSIConstants.JSON_LD_VALUE)); + notifierInfo.put(NGSIConstants.MQTT_QOS, String.valueOf(qos)); + break; + default: + notifierInfo.put(NGSIConstants.MQTT_VERSION, NGSIConstants.DEFAULT_MQTT_VERSION); + notifierInfo.put(NGSIConstants.MQTT_QOS,String.valueOf(NGSIConstants.DEFAULT_MQTT_QOS)); + } + } + endPoint.setNotifierInfo(notifierInfo); + break; + + default: + throw new ResponseException(ErrorType.BadRequestData, "Unkown entry for endpoint"); + } + } + endPoint.setAccept(accept); + //endPoint.setNotifierInfo(notifierInfo); + notifyParam.setEndPoint(endPoint); + break; + case NGSIConstants.NGSI_LD_FORMAT: + String formatString = (String) ((List>) entry.getValue()).get(0) + .get(NGSIConstants.JSON_LD_VALUE); + if (formatString.equalsIgnoreCase("keyvalues")) { + format = Format.keyValues; + } + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Unkown entry for notification"); + } + + } + notifyParam.setFormat(format); + return notifyParam; + } + + private List getAttribs(List> entry) throws ResponseException { + ArrayList watchedAttribs = new ArrayList(); + for (Map attribEntry : entry) { + String temp = (String) attribEntry.get(NGSIConstants.JSON_LD_ID); + if (temp.matches(NGSIConstants.NGSI_LD_FORBIDDEN_KEY_CHARS_REGEX)) { + throw new ResponseException(ErrorType.BadRequestData, "Invalid character in attribute names"); + } + watchedAttribs.add(temp); + } + if (watchedAttribs.isEmpty()) { + throw new ResponseException(ErrorType.BadRequestData, "Empty watched attributes entry"); + } + return watchedAttribs; + } + + private URI validateSubEndpoint(String string) throws ResponseException { + URI uri; + try { + uri = new URI(string); + if (Arrays.binarySearch(NGSIConstants.VALID_SUB_ENDPOINT_SCHEMAS, uri.getScheme()) == -1) { + throw new ResponseException(ErrorType.BadRequestData, "Unsupport endpoint scheme"); + } + } catch (URISyntaxException e) { + throw new ResponseException(ErrorType.BadRequestData, "Invalid endpoint"); + } + return uri; + } + + private LDGeoQuery getGeoQuery(Map map) throws Exception { + LDGeoQuery geoQuery = new LDGeoQuery(); + List> jsonCoordinates = (List>) map + .get(NGSIConstants.NGSI_LD_COORDINATES); + ArrayList coordinates = new ArrayList(); + + for (Map entry : jsonCoordinates) { + Object tempValue = entry.get(NGSIConstants.JSON_LD_VALUE); + if (tempValue instanceof Double) { + coordinates.add((Double) tempValue); + } else if (tempValue instanceof Integer) { + coordinates.add(((Integer) tempValue).doubleValue()); + } else if (tempValue instanceof Long) { + coordinates.add(((Long) tempValue).doubleValue()); + } else { + throw new ResponseException(ErrorType.BadRequestData, "Failed to parse coordinates"); + } + + } + geoQuery.setCoordinates(coordinates); + String geometry = (String) ((Map) ((List) 
map.get(NGSIConstants.NGSI_LD_GEOMETRY)).get(0)) + .get(NGSIConstants.JSON_LD_VALUE); + if (geometry.equalsIgnoreCase("point")) { + geoQuery.setGeometry(Geometry.Point); + } else if (geometry.equalsIgnoreCase("polygon")) { + geoQuery.setGeometry(Geometry.Polygon); + } + String geoRelString = (String) ((Map) ((List) map.get(NGSIConstants.NGSI_LD_GEO_REL)).get(0)) + .get(NGSIConstants.JSON_LD_VALUE); + String[] relSplit = geoRelString.split(";"); + GeoRelation geoRel = new GeoRelation(); + geoRel.setRelation(relSplit[0]); + for (int i = 1; i < relSplit.length; i++) { + String[] temp = relSplit[i].split("=="); + Object distance; + try { + distance = Integer.parseInt(temp[1]); + } catch (NumberFormatException e) { + distance = Double.parseDouble(temp[1]); + } + if (temp[0].equalsIgnoreCase("maxDistance")) { + + geoRel.setMaxDistance(distance); + } else if (temp[0].equalsIgnoreCase("minDistance")) { + geoRel.setMinDistance(distance); + } + } + geoQuery.setGeoRelation(geoRel); + return geoQuery; + } + + private int checkKey(String key, Pattern p) { + Matcher m = p.matcher(key); + int result = 10000; + while (m.find()) { + for (int i = 1; i <= m.groupCount(); i++) { + if (m.group(i) == null) { + continue; + } + if (result > i) { + result = i; + break; + } + } + } + return result; + } + + private Object checkHasValue(Object mapValue) throws ResponseException { + if (mapValue == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + if (mapValue instanceof List) { + List tempList = (List) mapValue; + if (!tempList.isEmpty()) + return tempList.get(0); + } + return null; + } + + private boolean protectRegistrationLocationEntry(Object mapValue, Entry mapEntry, + List usedContext) throws JsonGenerationException, IOException { + if (((List) mapValue).get(0) instanceof Map) { + Map temp = (Map) ((List) mapValue).get(0); + if (temp.get(NGSIConstants.JSON_LD_TYPE) != null) { + if (!((List) temp.get(NGSIConstants.JSON_LD_TYPE)).get(0).equals(NGSIConstants.NGSI_LD_GEOPROPERTY)) { + // we are in a location entry of registry as this is not a geo property + mapEntry.setValue(getProperGeoJson(mapValue, usedContext)); + return true; + } + } + } + return false; + } + + private String validateUri(String mapValue) throws ResponseException { + try { + if (!new URI(mapValue).isAbsolute()) { + throw new ResponseException(ErrorType.BadRequestData, "id is not a URI"); + } + return mapValue; + } catch (URISyntaxException e) { + throw new ResponseException(ErrorType.BadRequestData, "id is not a URI"); + } + + } + + private Object getProperGeoJson(Object value, List usedContext) + throws JsonGenerationException, IOException { + Map compactedFull = JsonLdProcessor.compact(value, usedContext, defaultOptions); + compactedFull.remove(NGSIConstants.JSON_LD_CONTEXT); + String geoType = (String) compactedFull.get(NGSIConstants.GEO_JSON_TYPE); + List geoValues = (List) compactedFull.get(NGSIConstants.GEO_JSON_COORDINATES); + switch (geoType) { + case NGSIConstants.GEO_TYPE_POINT: + // nothing to be done here point is ok like this + break; + case NGSIConstants.GEO_TYPE_LINESTRING: + ArrayList containerList = new ArrayList(); + for (int i = 0; i < geoValues.size(); i += 2) { + ArrayList container = new ArrayList(); + container.add(geoValues.get(i)); + container.add(geoValues.get(i + 1)); + containerList.add(container); + } + compactedFull.put(NGSIConstants.GEO_JSON_COORDINATES, containerList); + break; + + case NGSIConstants.GEO_TYPE_POLYGON: + ArrayList topLevelContainerList = new ArrayList(); + ArrayList 
polyContainerList = new ArrayList(); + for (int i = 0; i < geoValues.size(); i += 2) { + ArrayList container = new ArrayList(); + container.add(geoValues.get(i)); + container.add(geoValues.get(i + 1)); + polyContainerList.add(container); + } + topLevelContainerList.add(polyContainerList); + compactedFull.put(NGSIConstants.GEO_JSON_COORDINATES, topLevelContainerList); + break; + case NGSIConstants.GEO_TYPE_MULTI_POLYGON: + ArrayList multitopLevelContainerList = new ArrayList(); + ArrayList multimidLevelContainerList = new ArrayList(); + ArrayList multipolyContainerList = new ArrayList(); + for (int i = 0; i < geoValues.size(); i += 2) { + ArrayList container = new ArrayList(); + container.add(geoValues.get(i)); + container.add(geoValues.get(i + 1)); + multipolyContainerList.add(container); + } + multimidLevelContainerList.add(multipolyContainerList); + multitopLevelContainerList.add(multimidLevelContainerList); + compactedFull.put(NGSIConstants.GEO_JSON_COORDINATES, multitopLevelContainerList); + break; + default: + break; + } + String proctedValue = JsonUtils.toString(compactedFull); + // temp.replace("\"", "\\\""); + ArrayList tempList = new ArrayList(); + Map tempMap = new HashMap(); + tempMap.put(NGSIConstants.JSON_LD_VALUE, proctedValue); + tempList.add(tempMap); + return tempList; + } + + private void protectGeoProp(Map objMap, Object value, List usedContext) + throws JsonGenerationException, IOException, ResponseException { + Object potentialStringValue = ((Map) value).get(NGSIConstants.JSON_LD_VALUE); + if (potentialStringValue != null) { + return; + } + + + Map compactedFull = JsonLdProcessor.compact(value, usedContext, defaultOptions); + compactedFull.remove(NGSIConstants.JSON_LD_CONTEXT); + String geoType = (String) compactedFull.get(NGSIConstants.GEO_JSON_TYPE); + //This is needed because one context could map from type which wouldn't work with the used context. 
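+		// (i.e. if a supplied @context remaps "type", the compaction above returns the GeoJSON
+		// type under a different key, the GEO_JSON_TYPE lookup yields null and the value is
+		// re-compacted below against the plain core context as a fallback)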
+ //Used context is needed because something could map point + //This is not good but new geo type will come so this can go away at some time + if(geoType == null) { + compactedFull = JsonLdProcessor.compact(value, CORE_CONTEXT, defaultOptions); + compactedFull.remove(NGSIConstants.JSON_LD_CONTEXT); + geoType = (String) compactedFull.get(NGSIConstants.GEO_JSON_TYPE); + + } + List geoValues = (List) compactedFull.get(NGSIConstants.GEO_JSON_COORDINATES); + Object entry1, entry2; + switch (geoType) { + case NGSIConstants.GEO_TYPE_POINT: + // nothing to be done here point is ok like this + entry1 = geoValues.get(0); + entry2 = geoValues.get(1); + if((!(entry1 instanceof Double) && !(entry1 instanceof Integer)) || (!(entry2 instanceof Double) && !(entry2 instanceof Integer))) { + throw new ResponseException(ErrorType.BadRequestData, "Provided coordinate entry is not a float value"); + } + break; + case NGSIConstants.GEO_TYPE_LINESTRING: + ArrayList containerList = new ArrayList(); + for (int i = 0; i < geoValues.size(); i += 2) { + ArrayList container = new ArrayList(); + entry1 = geoValues.get(i); + entry2 = geoValues.get(i + 1); + if((!(entry1 instanceof Double) && !(entry1 instanceof Integer)) || (!(entry2 instanceof Double) && !(entry2 instanceof Integer))) { + throw new ResponseException(ErrorType.BadRequestData, "Provided coordinate entry is not a float value"); + } + container.add(entry1); + container.add(entry2); + containerList.add(container); + } + compactedFull.put(NGSIConstants.GEO_JSON_COORDINATES, containerList); + break; + + case NGSIConstants.GEO_TYPE_POLYGON: + ArrayList topLevelContainerList = new ArrayList(); + ArrayList polyContainerList = new ArrayList(); + if(!geoValues.get(0).equals(geoValues.get(geoValues.size()-2)) || !geoValues.get(1).equals(geoValues.get(geoValues.size()-1))) { + throw new ResponseException(ErrorType.BadRequestData, "Polygon does not close"); + } + for (int i = 0; i < geoValues.size(); i += 2) { + ArrayList container = new ArrayList(); + entry1 = geoValues.get(i); + entry2 = geoValues.get(i + 1); + if((!(entry1 instanceof Double) && !(entry1 instanceof Integer)) || (!(entry2 instanceof Double) && !(entry2 instanceof Integer))) { + throw new ResponseException(ErrorType.BadRequestData, "Provided coordinate entry is not a float value"); + } + container.add(entry1); + container.add(entry2); + polyContainerList.add(container); + } + topLevelContainerList.add(polyContainerList); + compactedFull.put(NGSIConstants.GEO_JSON_COORDINATES, topLevelContainerList); + break; + case NGSIConstants.GEO_TYPE_MULTI_POLYGON: + ArrayList multiTopLevelContainerList = new ArrayList(); + ArrayList multiMidLevelContainerList = new ArrayList(); + ArrayList multiPolyContainerList = new ArrayList(); + for (int i = 0; i < geoValues.size(); i += 2) { + ArrayList container = new ArrayList(); + entry1 = geoValues.get(i); + entry2 = geoValues.get(i + 1); + if((!(entry1 instanceof Double) && !(entry1 instanceof Integer)) || (!(entry2 instanceof Double) && !(entry2 instanceof Integer))) { + throw new ResponseException(ErrorType.BadRequestData, "Provided coordinate entry is not a float value"); + } + container.add(entry1); + container.add(entry2);multiPolyContainerList.add(container); + } + multiMidLevelContainerList.add(multiPolyContainerList); + multiTopLevelContainerList.add(multiMidLevelContainerList); + + compactedFull.put(NGSIConstants.GEO_JSON_COORDINATES, multiTopLevelContainerList); + break; + + default: + break; + } + String proctedValue = JsonUtils.toString(compactedFull); + 
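+		// The validated GeoJSON is serialized into a plain string and stored below as the
+		// literal @value of hasValue, so later JSON-LD processing cannot expand or reorder the
+		// coordinate arrays; unprotectGeoProps() parses the string back into a JSON structure
+		// when the entity is compacted for output. The stored entry roughly looks like this
+		// (illustrative only):
+		//   "https://uri.etsi.org/ngsi-ld/hasValue": [ { "@value": "{\"type\":\"Point\",\"coordinates\":[13.39,52.55]}" } ]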
// temp.replace("\"", "\\\""); + ArrayList tempList = new ArrayList(); + Map tempMap = new HashMap(); + tempMap.put(NGSIConstants.JSON_LD_VALUE, proctedValue); + tempList.add(tempMap); + objMap.put(NGSIConstants.NGSI_LD_HAS_VALUE, tempList); + } + + private void unprotectGeoProps(Object json) throws JsonParseException, IOException { + if (json instanceof Map) { + unprotectGeoProps((Map) json); + } else if (json instanceof List) { + unprotectGeoProps((List) json); + } + + } + + private void unprotectGeoProps(Map objMap) throws JsonParseException, IOException { + boolean typeFound = false; + Object value = null; + for (Entry mapEntry : objMap.entrySet()) { + + String key = mapEntry.getKey(); + Object mapValue = mapEntry.getValue(); + if (key.equals(NGSIConstants.JSON_LD_CONTEXT)) { + continue; + } + if (key.equals(NGSIConstants.NGSI_LD_LOCATION_SHORT)) { + if (mapValue instanceof String) { + mapEntry.setValue(JsonUtils.fromString((String) mapValue)); + continue; + } + } + if (key.equals(NGSIConstants.NGSI_LD_WATCHED_ATTRIBUTES_SHORT) + || key.equals(NGSIConstants.NGSI_LD_ATTRIBUTES_SHORT) + || key.equals(NGSIConstants.NGSI_LD_ENTITIES_SHORT)) { + if (!(mapValue instanceof List)) { + ArrayList temp = new ArrayList(); + temp.add(mapValue); + mapEntry.setValue(temp); + } + continue; + } + + if (NGSIConstants.QUERY_PARAMETER_TYPE.equals(key) && (mapValue instanceof String)) { + + if (NGSIConstants.NGSI_LD_GEOPROPERTY_SHORT.equals(mapValue)) { + typeFound = true; + } + // if(tempObj instanceof Map) { + // if(NGSIConstants.NGSI_LD_GEOPROPERTY.equals(((Map)tempObj).get(NGSIConstants.JSON_LD_VALUE))){ + // typeFound = true; + // } + // } + + } else if (NGSIConstants.VALUE.equals(key)) { + value = mapValue; + } else { + if (mapValue instanceof Map) { + unprotectGeoProps((Map) mapValue); + } else if (mapValue instanceof List) { + unprotectGeoProps((List) mapValue); + } + } + } + + if (typeFound && value != null) { + + objMap.put(NGSIConstants.VALUE, JsonUtils.fromString((String) value)); + + } + + } + + private void unprotectGeoProps(List objList) throws JsonParseException, IOException { + for (Object entry : objList) { + if (entry instanceof Map) { + + unprotectGeoProps((Map) entry); + } else if (entry instanceof List) { + unprotectGeoProps((List) entry); + } else { + // don't care for now i think + } + } + + } + + /** + * @param body expanded json ld version + * @return rdf representation of entity/entities + * @throws ResponseException + */ + public String getRDF(String body) throws ResponseException { + try { + RDFDataset rdf = (RDFDataset) JsonLdProcessor.toRDF(JsonUtils.fromString(body), defaultOptions); + + return RDFDatasetUtils.toNQuads(rdf); + } catch (JsonParseException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } catch (IOException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } + } + + public CompactedJson compact(String body, List contextLinks) throws ResponseException { + try { + Object json = JsonUtils.fromString(body); + Map context = new HashMap(); + for (Object url : contextLinks) { + context.putAll(getRemoteContext((String) url)); + } + return compact(json, context, contextLinks); + } catch (JsonParseException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } catch (IOException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } + + } + + public CompactedJson compact(String body) throws ResponseException { + try { + // This 
should anyway never happen as we only compact for output an there will + // be no @context in that + // Object json = JsonUtils.fromString(body); + // List context; + // context = (List) json.getOrDefault(NGSIConstants.JSON_LD_CONTEXT, new + // ArrayList()); + // Map fullContext = getFullContext(context); + // json.remove(NGSIConstants.JSON_LD_CONTEXT); + return compact(JsonUtils.fromString(body), null, null); + } catch (JsonParseException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } catch (IOException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest); + } + } + + private CompactedJson compact(Object json, Map context, List rawContext) + throws ResponseException { + // validateAndCleanContext(context); + CompactedJson result = new CompactedJson(); + int hash = json.hashCode(); + if (context.containsKey(IS_FULL_VALID)) { + result.setContextUrl((String) rawContext.get(0)); + } else { + rawContext.add(CORE_CONTEXT_URL_STR); + result.setContextUrl(generateAtContextServing(rawContext, hash)); + + } + context.remove(IS_FULL_VALID); + try { + cleanExpandedJson(json); + Map tempResult = JsonLdProcessor.compact(json, rawContext, defaultOptions); + unprotectGeoProps(tempResult); +// unprotectLocationFromRegistry(tempResult); + if (tempResult.containsKey("@graph")) { + // we are in a multiresult set + Object atContext = tempResult.get(NGSIConstants.JSON_LD_CONTEXT); + List> toCompact = (List>) tempResult.get("@graph"); + result.setCompacted(JsonUtils.toPrettyString(toCompact)); + for (Map entry : toCompact) { + entry.put(NGSIConstants.JSON_LD_CONTEXT, rawContext); + } + result.setCompactedWithContext(JsonUtils.toPrettyString(toCompact)); + } else { + + tempResult.put(NGSIConstants.JSON_LD_CONTEXT, rawContext); + result.setCompactedWithContext(JsonUtils.toPrettyString(tempResult)); + tempResult.remove(NGSIConstants.JSON_LD_CONTEXT); + result.setCompacted(JsonUtils.toPrettyString(tempResult)); + } + + } catch (IOException e) { + e.printStackTrace(); + throw new ResponseException(ErrorType.InvalidRequest, e.getMessage()); + } + return result; + } + + private void cleanExpandedJson(Object json) { + if(json instanceof List) { + List tempList = (List)json; + for(Object entry: tempList) { + cleanExpandedJson(entry); + } + }else if(json instanceof Map) { + Map tempMap = (Map)json; + Iterator it = tempMap.entrySet().iterator(); + while(it.hasNext()) { + Entry next = it.next(); + Object key = next.getKey(); + Object value = next.getValue(); + if(NGSIConstants.NGSI_LD_DATA_SET_ID.equals(key) && NGSIConstants.DEFAULT_DATA_SET_ID.equals(((Map)((List)value).get(0)).get(NGSIConstants.JSON_LD_ID))) { + it.remove(); + continue; + } + if(value instanceof Map || value instanceof List) { + cleanExpandedJson(value); + } + } + } + + } + + private String generateAtContextServing(List rawContext, int hash) { + ArrayList sorted = new ArrayList(); + if (rawContext != null && !rawContext.isEmpty()) { + sorted.addAll(rawContext); + } + // sorted.add(DEFAULT_CONTEXT_URL); + sorted.add(CORE_CONTEXT_URL); + try { + kafkaOps.pushToKafka(producerChannel.atContextWriteChannel(), (hash + "").getBytes(), + DataSerializer.toJson(sorted).getBytes()); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return AT_CONTEXT_BASE_URL + hash; + } + + private Map getFullContext(Object context) throws ResponseException { + Map result = new HashMap(); + if (context instanceof String) { + // just another url + String temp = 
(String) context; + if (temp.equals(CORE_CONTEXT_URL_STR)) { + result.put(IS_FULL_VALID, true); + } else { + // Don't download core again + result.putAll(getRemoteContext(temp)); + } + } else if (context instanceof List) { + for (Object entry : (List) context) { + if (entry instanceof String) { + // just another url + String temp = (String) entry; + if (temp.equals(CORE_CONTEXT_URL_STR)) { + result.put(IS_FULL_VALID, true); + } else { + try { + result.putAll(getRemoteContext(entry.toString())); + } catch (ResponseException e) { + // this can happen as not all "remote" entries are really remote contexts + // print error to show up in log and add "remote" as is to used context + logger.warn( + "Failed to get a remote context. This can happen as you can also just give a url. Check the error!" + + e.getMessage()); + } + } + } else if (entry instanceof Map) { + result.putAll(((Map) entry)); + } else if (entry instanceof List) { + result.putAll(getFullContext(entry)); + } else { + // Everything else should be illegal for @context + throw new ResponseException(ErrorType.BadRequestData, "Illegal state of @context"); + } + } + } else if (context instanceof Map) { + // @context entries, straight key value pairs + result.putAll(((Map) context)); + } else { + // Everything else should be illegal for @context + throw new ResponseException(ErrorType.BadRequestData, "Illegal state of @context"); + } + return result; + } + + private Map getRemoteContext(String url) throws ResponseException { + try { + + String body = httpUtils.doGet(new URI(url)); + Map remoteContext = (Map) JsonUtils.fromString(body); + Object temp = remoteContext.get(NGSIConstants.JSON_LD_CONTEXT); + if (temp == null) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to get remote @context from " + url); + } + return getFullContext(temp); + + } catch (IOException | URISyntaxException e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to get remote @context from " + url); + } + + } + + private void validateAndCleanContext(Map contextToMerge) throws ResponseException { + if (contextToMerge == null) { + return; + } + Iterator> it = contextToMerge.entrySet().iterator(); + + while (it.hasNext()) { + Entry next = it.next(); + String key = next.getKey(); + Object value = next.getValue(); + if (BASE_CONTEXT.containsKey(key)) { + if (!value.equals(BASE_CONTEXT.get(key))) { + // Attemp to overwrite default context + throw new ResponseException(ErrorType.BadRequestData, + "Provided context entry " + key + "=" + value.toString() + " overrides base context"); + } + it.remove(); + continue; + } + + } + } + + private String validateSubNotifierInfoMqttVersion(String string) throws ResponseException { + try { + if (!Arrays.asList(NGSIConstants.VALID_MQTT_VERSION).contains(string)) { + throw new ResponseException(ErrorType.BadRequestData, "Unsupport Mqtt version"); + } + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Unsupport Mqtt version"); + } + return string; + } + + private int validateSubNotifierInfoQos(Integer qos) throws ResponseException { + try { + if (!Arrays.asList(NGSIConstants.VALID_QOS).contains(qos)) { + throw new ResponseException(ErrorType.BadRequestData, "Unsupport Qos"); + } + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData, "Unsupport Qos"); + } + return qos; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ngsiqueries/ParamsResolver.java 
b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ngsiqueries/ParamsResolver.java new file mode 100644 index 0000000000000000000000000000000000000000..0c141f59aae00ae185ed21fe5f17741049807377 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ngsiqueries/ParamsResolver.java @@ -0,0 +1,330 @@ +package eu.neclab.ngsildbroker.commons.ngsiqueries; + +import java.io.IOException; +import java.time.LocalDateTime; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import javax.servlet.http.HttpServletRequest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpHeaders; +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.jsonldjava.utils.JsonUtils; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +@Component +public class ParamsResolver { + + private final static Logger logger = LogManager.getLogger(ParamsResolver.class); + + @Autowired + ContextResolverBasic contextResolver; + + @Autowired + ObjectMapper objectMapper; + + @Autowired + QueryParser queryParser; + + public QueryParams getQueryParamsFromUriQuery(Map ngsildQueryParams, List linkHeaders) + throws ResponseException { + return this.getQueryParamsFromUriQuery(ngsildQueryParams, linkHeaders, false); + } + + public List getQueryParamsFromSubscription(Subscription subscription) { + + ArrayList result = new ArrayList(); + for (EntityInfo entityInfo : subscription.getEntities()) { + QueryParams temp = new QueryParams(); + + if (subscription.getNotification().getAttributeNames() != null + && !subscription.getNotification().getAttributeNames().isEmpty()) { + temp.setAttrs(String.join(",", subscription.getNotification().getAttributeNames())); + } + temp.setType(entityInfo.getType()); + if (entityInfo.getId() != null) { + temp.setId(entityInfo.getId().toString()); + } + if (entityInfo.getIdPattern() != null) { + temp.setIdPattern(entityInfo.getIdPattern()); + } + if (subscription.getLdGeoQuery() != null) { + temp.setGeometry(subscription.getLdGeoQuery().getGeometry().name()); + temp.setGeoproperty(subscription.getLdGeoQuery().getGeoProperty()); + temp.setGeorel(new GeoqueryRel(subscription.getLdGeoQuery().getGeoRelation())); + StringBuilder builder = new StringBuilder(); + List coordinates = subscription.getLdGeoQuery().getCoordinates(); + for (int i = 0; i < 
coordinates.size(); i += 2) { + builder.append("["); + builder.append(coordinates.get(i)); + builder.append(","); + builder.append(coordinates.get(i + 1)); + builder.append("]"); + } + String coordinatesString; + switch (temp.getGeometry().toLowerCase()) { + case "polygon": + coordinatesString = "[[" + builder.toString() +"]]"; + break; + case "linestring": + coordinatesString = "[" + builder.toString() +"]"; + break; + case "point": + default: + coordinatesString = builder.toString(); + break; + } + temp.setCoordinates(coordinatesString); + } + if(subscription.getLdQuery() != null && !subscription.getLdQuery().isEmpty()) { + temp.setQ(subscription.getLdQuery()); + } + result.add(temp); + } + + return result; + } + + // new simplified format + public QueryParams getQueryParamsFromUriQuery(Map ngsildQueryParams, List linkHeaders, + boolean temporalEntityFormat) throws ResponseException { + logger.trace("call getStorageManagerJsonQuery method ::"); + try { + QueryParams qp = new QueryParams(); + Iterator it = ngsildQueryParams.keySet().iterator(); + while (it.hasNext()) { + String queryParameter = it.next(); + String queryValue = ngsildQueryParams.get(queryParameter)[0]; + logger.debug("Query parameter:" + queryParameter + ", value=" + queryValue); + GeoqueryRel geoqueryTokens; + switch (queryParameter) { + case NGSIConstants.QUERY_PARAMETER_ID: + qp.setId(queryValue); + break; + case NGSIConstants.QUERY_PARAMETER_IDPATTERN: + qp.setIdPattern(queryValue); + break; + case NGSIConstants.QUERY_PARAMETER_TYPE: + queryValue = expandQueryValues(linkHeaders, queryValue); + qp.setType(queryValue); + break; + case NGSIConstants.QUERY_PARAMETER_ATTRS: + queryValue = expandQueryValues(linkHeaders, queryValue); + qp.setAttrs(queryValue); + break; + case NGSIConstants.QUERY_PARAMETER_GEOREL: + String georel = queryValue; + String geometry = ""; + String coordinates = ""; + String geoproperty = ""; + if (ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_GEOMETRY) != null) + geometry = ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_GEOMETRY)[0]; + if (ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_COORDINATES) != null) + coordinates = ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_COORDINATES)[0]; + if (ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_GEOPROPERTY) != null) { + geoproperty = ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_GEOPROPERTY)[0]; + geoproperty = expandAttribute(geoproperty, linkHeaders); + } else { + geoproperty = NGSIConstants.QUERY_PARAMETER_DEFAULT_GEOPROPERTY; + } + + geoqueryTokens = queryParser.parseGeoRel(georel); + logger.debug(" Geoquery term georelOp: " + geoqueryTokens.getGeorelOp()); + + if (geoqueryTokens.getGeorelOp().isEmpty() || geometry.isEmpty() || coordinates.isEmpty()) { + throw new ResponseException(ErrorType.BadRequestData, + "Georel detected but georel, geometry or coordinates are empty!"); + } + if (!AppConstants.NGB_ALLOWED_GEOM_LIST.contains(geometry.toUpperCase())) { + throw new ResponseException(ErrorType.BadRequestData, + " geometry detected, Bad geometry!" 
+ geometry); + } + validateCoordinates(coordinates); + GeoqueryRel gr = new GeoqueryRel(); + gr.setGeorelOp(geoqueryTokens.getGeorelOp()); + gr.setDistanceType(geoqueryTokens.getDistanceType()); + gr.setDistanceValue(geoqueryTokens.getDistanceValue()); + + qp.setGeorel(gr); + qp.setGeometry(geometry); + qp.setCoordinates(coordinates); + qp.setGeoproperty(geoproperty); + break; + case NGSIConstants.QUERY_PARAMETER_TIMEREL: + String timerel = queryValue; + String time = ""; + String timeproperty = ""; + String endTime = ""; + if (ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_TIME) != null) + time = ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_TIME)[0]; + if (ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_TIMEPROPERTY) != null) { + timeproperty = ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_TIMEPROPERTY)[0]; + timeproperty = expandAttribute(timeproperty, linkHeaders); + } else { + timeproperty = NGSIConstants.QUERY_PARAMETER_DEFAULT_TIMEPROPERTY; + } + if (ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_ENDTIME) != null) + endTime = ngsildQueryParams.get(NGSIConstants.QUERY_PARAMETER_ENDTIME)[0]; + + if (time.isEmpty()) { + throw new ResponseException(ErrorType.BadRequestData, "Time is empty"); + } + if (timerel.equals(NGSIConstants.TIME_REL_BETWEEN) && endTime.isEmpty()) { + throw new ResponseException(ErrorType.BadRequestData, + "Timerel is between but endTime is empty"); + } + + qp.setTimerel(timerel); + qp.setTime(time); + qp.setTimeproperty(timeproperty); + qp.setEndTime(endTime); + break; + case NGSIConstants.QUERY_PARAMETER_QUERY: + qp.setQ(queryParser.parseQuery(queryValue, linkHeaders).toSql(temporalEntityFormat)); + break; + case NGSIConstants.QUERY_PARAMETER_OPTIONS: + List options = Arrays.asList(queryValue.split(",")); + qp.setIncludeSysAttrs(options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_SYSATTRS)); + qp.setKeyValues(options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_KEYVALUES)); + qp.setTemporalValues(options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_TEMPORALVALUES)); + break; + } + } + return qp; + } catch (ResponseException e) { + throw e; // rethrow response exception object + } + // return null; + } + + private void validateCoordinates(String coordinates) throws ResponseException { + if(!coordinates.matches("^\\[*(\\[\\s*[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)(,\\d)?,[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)\\],?)+\\]*$")) { + throw new ResponseException(ErrorType.BadRequestData, "coordinates are not valid"); + } + + } + + private String expandQueryValues(List linkHeaders, String queryValue) throws ResponseException { + String[] temp = queryValue.split(","); + StringBuilder builder = new StringBuilder(); + for (String element : temp) { + builder.append(expandAttribute(element.trim(), linkHeaders)); + builder.append(","); + } + return builder.substring(0, builder.length() - 1); + } + + public String expandAttribute(String attribute, String payload, HttpServletRequest req) throws ResponseException { + List context; + if (req.getHeader(HttpHeaders.CONTENT_TYPE).equals(AppConstants.NGB_APPLICATION_JSON)) { + context = HttpUtils.getAtContext(req); + } else { + JsonNode json; + try { + json = objectMapper.readTree(payload); + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData, "Failed to read json from body"); + } + context = new ArrayList(); + if (json.has(NGSIConstants.JSON_LD_CONTEXT)) { + JsonNode tempContext = json.get(NGSIConstants.JSON_LD_CONTEXT); + try { + 
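+				// the @context node found in the request body is parsed back into a Java
+				// structure and used as the context for expanding the attribute name below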
context.add(JsonUtils.fromString(tempContext.toString())); + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + } + } + return expandAttribute(attribute, context); + } + + public String expandAttribute(String attribute, List context) throws ResponseException { + logger.trace("resolveQueryLdContext():: started"); + + // process reserved attributes + switch (attribute) { + case NGSIConstants.QUERY_PARAMETER_ID: + return NGSIConstants.JSON_LD_ID; + case NGSIConstants.QUERY_PARAMETER_TYPE: + return NGSIConstants.JSON_LD_TYPE; + case NGSIConstants.QUERY_PARAMETER_CREATED_AT: + return NGSIConstants.NGSI_LD_CREATED_AT; + case NGSIConstants.QUERY_PARAMETER_MODIFIED_AT: + return NGSIConstants.NGSI_LD_MODIFIED_AT; + case NGSIConstants.QUERY_PARAMETER_OBSERVED_AT: + return NGSIConstants.NGSI_LD_OBSERVED_AT; + case NGSIConstants.QUERY_PARAMETER_LOCATION: + return NGSIConstants.NGSI_LD_LOCATION; + case NGSIConstants.QUERY_PARAMETER_OBSERVATION_SPACE: + return NGSIConstants.NGSI_LD_OBSERVATION_SPACE; + case NGSIConstants.QUERY_PARAMETER_OPERATION_SPACE: + return NGSIConstants.NGSI_LD_OPERATION_SPACE; + } + + // custom attributes + String attributeResolved = attribute; + logger.debug("link: " + context); + String jsonLdAttribute = getJsonLdAttribute(attribute, context); + logger.debug("jsonLdAttribute: " + jsonLdAttribute); + LocalDateTime start = LocalDateTime.now(); + String jsonLdAttributeResolved = contextResolver.expand(jsonLdAttribute, context, false, AppConstants.INTERNAL_CALL_ID); + LocalDateTime end = LocalDateTime.now(); + + logger.debug("jsonLdAttributeResolved: " + jsonLdAttributeResolved); + JsonParser parser = new JsonParser(); + JsonElement jsonTree = parser.parse(jsonLdAttributeResolved); + if (jsonTree.isJsonObject()) { + JsonObject jsonObject = jsonTree.getAsJsonObject(); + if (jsonObject.entrySet().size() > 0) + attributeResolved = jsonObject.entrySet().iterator().next().getKey(); + } + logger.trace("resolveQueryLdContext():: completed"); + return attributeResolved; + } + + private String getJsonLdAttribute(String attribute, List context) { + logger.trace("getJsonLdAttribute():: started"); + String jsonString = null; + try { + JsonNode rootNode = objectMapper.createObjectNode(); + // if (context != null) { + // ArrayNode contextNode = objectMapper.valueToTree(context); + // ((ObjectNode) rootNode).putArray("@context").addAll(contextNode); + // } + // cant be in here like that + ((ObjectNode) rootNode).put(attribute, ""); + jsonString = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(rootNode); + } catch (JsonProcessingException e) { + logger.error("Exception ::", e); + e.printStackTrace(); + } + logger.trace("getJsonLdAttribute():: completed"); + return jsonString; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ngsiqueries/QueryParser.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ngsiqueries/QueryParser.java new file mode 100644 index 0000000000000000000000000000000000000000..895394f95dde8130c8a19322f4c96a7a05a38f14 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/ngsiqueries/QueryParser.java @@ -0,0 +1,187 @@ +package eu.neclab.ngsildbroker.commons.ngsiqueries; + +import java.util.List; +import java.util.regex.Pattern; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import 
eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryTerm; +import eu.neclab.ngsildbroker.commons.exceptions.BadRequestException; + +@Component +public class QueryParser { + // Query = (QueryTerm / QueryTermAssoc) *(logicalOp (QueryTerm / + // QueryTermAssoc)) + // QueryTermAssoc = %x28 QueryTerm *(logicalOp QueryTerm) %x29 ; (QueryTerm) + + private String andOp = ";"; + private String orOp = "\\|"; + private String logicalOp = "((" + andOp + ")|(" + orOp + "))"; + private String quotedStr = "\".*\""; + private String equal = "=="; + private String unequal = "!="; + private String greater = ">"; + private String greaterEq = ">="; + private String less = "<"; + private String lessEq = "<="; + private String patternOp = "~="; + private String notPatternOp = "!~="; + private String operator = "(" + equal + "|" + unequal + "|" + greaterEq + "|" + greater + "|" + lessEq + "|" + less + + ")"; + @SuppressWarnings("unused") + private String allOperators = "(" + equal + "|" + unequal + "|" + greaterEq + "|" + greater + "|" + lessEq + "|" + + less + "|" + patternOp + "|" + notPatternOp + ")"; + private String dots = "\\.\\."; + private String dateTime = "\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(,\\d\\d\\d\\d\\d\\d)?Z"; + private String date = "\\d\\d\\d\\d-\\d\\d-\\d\\d"; + private String time = "\\d\\d:\\d\\d:\\d\\d(,\\d\\d\\d\\d\\d\\d)?Z"; + private String comparableValue = "((" + quotedStr + ")|(" + dateTime + ")|(" + date + ")|(" + time + ")|(\\d+))"; + private String otherValue = "(true|false)"; + private String value = "(" + comparableValue + "|" + otherValue + ")"; + private String valueList = value + "(," + value + ")*"; + private String range = "(" + comparableValue + dots + comparableValue + ")"; + private String uri = "\\w+:(\\/?\\/?)[^\\s]+"; + private String compEqualityValue = "(" + otherValue + "|" + valueList + "|" + range + "|" + uri + ")"; + private String attrName = "\\w+"; + private String attrPathName = attrName + "(\\." + attrName + ")*"; + private String compoundAttrName = attrName + "\\[" + attrName + "\\]*"; + private String attribute = "(" + attrName + "|" + compoundAttrName + "|" + attrPathName + ")"; + private String queryTermCompare = "" + attribute + "" + operator + "" + comparableValue + ""; + private String queryTermEqual = "" + attribute + equal + compEqualityValue + ""; + private String queryTermUnequal = "" + attribute + "" + unequal + "" + compEqualityValue + ""; + private String queryTermPattern = "" + attribute + patternOp + "(.+)"; + private String queryTermNotPattern = "" + attribute + notPatternOp + "(.*)"; + private String queryTerm = "(" + queryTermCompare + ")|(" + queryTermEqual + ")|(" + queryTermUnequal + ")|(" + + queryTermPattern + ")|(" + queryTermNotPattern + ")"; + private String queryTermAssoc = "\\((" + queryTerm + "((" + logicalOp + ")(" + queryTerm + "))*)\\)"; + private String query = "((" + queryTerm + ")|(" + queryTermAssoc + "))" + "((" + logicalOp + ")((" + queryTerm + ")|(" + + queryTermAssoc + ")))*"; + @SuppressWarnings("unused") + //TODO validate queries still not working ... rework regex ??? 
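+	// The regex pieces above mirror the NGSI-LD query grammar: a query is one or more query
+	// terms (attribute, operator, value) combined with ";" (and) or "|" (or), with parentheses
+	// for grouping, e.g. the test query built in main() below:
+	//   (test1=="teststring";(test2>=12345|test3!="testst123ring"))|test4<=12345
+	// parseQuery() walks the input character by character and builds a QueryTerm tree instead
+	// of relying on this compiled pattern, since the pattern does not yet validate bracketed
+	// queries correctly (see the TODO above).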
+ private Pattern p = Pattern.compile(query); + + @Autowired + ParamsResolver paramsResolver; + + public static void main(String[] args) throws Exception { + QueryParser test = new QueryParser(); + String attribName1 = "test1"; + String operator1 = "=="; + String operant1 = "\"teststring\""; + + String attribName2 = "test2"; + String operator2 = ">="; + String operant2 = "12345"; + + String attribName3 = "test3"; + String operator3 = "!="; + String operant3 = "\"testst123ring\""; + + String attribName4 = "test4"; + String operator4 = "<="; + String operant4 = "12345"; + String q = "(" + attribName1 + operator1 + operant1 + ";(" + attribName2 + operator2 + operant2 + "|" + attribName3 + operator3 + operant3 + "))|" + attribName4 + operator4 + operant4; + System.out.println(q); + QueryTerm term = test.parseQuery(q, null); + System.out.println(term); + // Pattern.compile(test.queryTermUnequal).matcher("brandName!=\"Mercedes\"").group(); + + } + + + + public QueryTerm parseQuery(String input, List linkHeaders) throws BadRequestException { +// Matcher matcher = p.matcher(input); +// if (!matcher.matches()) { +// throw new BadRequestException(); +// } + //TODO: regex doesn't validate brackets queries for some reason + QueryTerm root = new QueryTerm(linkHeaders, paramsResolver); + QueryTerm current = root; + boolean readingAttrib = true; + String attribName = ""; + String operator = ""; + String operant = ""; + for (byte b : input.getBytes()) { + + if (b == '(') { + QueryTerm child = new QueryTerm(linkHeaders, paramsResolver); + current.setFirstChild(child); + current = child; + readingAttrib = true; + + } else if (b == ';') { + QueryTerm next = new QueryTerm(linkHeaders, paramsResolver); + current.setOperant(operant); + current.setNext(next); + current.setNextAnd(true); + current = next; + readingAttrib = true; + + operant = ""; + + } else if (b == '|') { + QueryTerm next = new QueryTerm(linkHeaders, paramsResolver); + current.setOperant(operant); + current.setNext(next); + current.setNextAnd(false); + current = next; + readingAttrib = true; + + operant = ""; + + } else if (b == ')') { + current.setOperant(operant); + current = current.getParent(); + readingAttrib = true; + + operant = ""; + + } else if (b == '!' 
|| b == '=' || b == '<' || b == '>' || b == '~') { + operator += (char) b; + readingAttrib = false; + if (!attribName.equals("")) { + current.setAttribute(attribName); + attribName = ""; + } + } else { + if (readingAttrib) { + attribName += (char) b; + } else { + if (!operator.equals("")) { + current.setOperator(operator); + operator = ""; + } + + operant += (char) b; + } + } + + } + if (!operant.equals("")) { + current.setOperant(operant); + } + return root; + } + + + + public GeoqueryRel parseGeoRel(String georel) throws BadRequestException { + String[] temp = georel.split(";"); + GeoqueryRel result = new GeoqueryRel(); + result.setGeorelOp(temp[0]); + if(temp[0].equals(NGSIConstants.GEO_REL_NEAR)) { + if(temp.length < 2) { + throw new BadRequestException(); + } + String[] maxMin = temp[1].split("=="); + result.setDistanceType(maxMin[0]); + result.setDistanceValue(maxMin[1]); + } + return result; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/ConfigDetails.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/ConfigDetails.java new file mode 100644 index 0000000000000000000000000000000000000000..2f8bac9a93d56dd870968e646b715dd5d3f5617f --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/ConfigDetails.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.commons.securityConfig; + +import java.util.List; + +public class ConfigDetails { + private String api; + private List role; + private String method; + public String getApi() { + return api; + } + public void setApi(String api) { + this.api = api; + } + public List getRole() { + return role; + } + public void setRole(List role) { + this.role = role; + } + public String getMethod() { + return method; + } + public void setMethod(String method) { + this.method = method; + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/ResourceConfigDetails.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/ResourceConfigDetails.java new file mode 100644 index 0000000000000000000000000000000000000000..5a0c5f8188a7c8834042581c95bc396bbdc06dc6 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/ResourceConfigDetails.java @@ -0,0 +1,28 @@ + +package eu.neclab.ngsildbroker.commons.securityConfig; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Lazy; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +@Configuration +public class ResourceConfigDetails { + @Autowired + SecurityConfig securityConfig; + + public void ngbSecurityConfig(HttpSecurity http) throws Exception { +// if (securityConfig.getSecEnabled().equalsIgnoreCase("true")) { +// http.httpBasic().disable(); +// for (ConfigDetails details : securityConfig.getAuth()) { +// List arrRole = details.getRole(); +// http.authorizeRequests().antMatchers(HttpMethod.resolve(details.getMethod()),details.getApi()) +// .hasAnyRole(arrRole.toString().replace("[","").replace("]","")); +// http.authorizeRequests().antMatchers(HttpMethod.resolve(details.getMethod()),details.getApi()) +// .permitAll(); +// } +// } else { + http.authorizeRequests().anyRequest().permitAll(); +// } + + } +} diff --git 
a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/SecurityConfig.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/SecurityConfig.java new file mode 100644 index 0000000000000000000000000000000000000000..a594a2c13e9d12ef2378d6326cedf18a902a4942 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/securityConfig/SecurityConfig.java @@ -0,0 +1,26 @@ +package eu.neclab.ngsildbroker.commons.securityConfig; + +import java.util.List; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.context.properties.EnableConfigurationProperties; + +@EnableConfigurationProperties +@ConfigurationProperties +public class SecurityConfig { + + private String secEnabled; + private List auth; + public String getSecEnabled() { + return secEnabled; + } + public void setSecEnabled(String secEnabled) { + this.secEnabled = secEnabled; + } + public List getAuth() { + return auth; + } + public void setAuth(List auth) { + this.auth = auth; + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/BatchResultGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/BatchResultGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..b864a37479412029a818f8fa16a33079fe6bbdb5 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/BatchResultGsonAdapter.java @@ -0,0 +1,22 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +import eu.neclab.ngsildbroker.commons.datatypes.BatchResult; + +public class BatchResultGsonAdapter implements JsonSerializer { + + @Override + public JsonElement serialize(BatchResult src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + top.add("success", context.serialize(src.getSuccess())); + top.add("errors", context.serialize(src.getFails())); + return top; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/CSourceRegistrationGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/CSourceRegistrationGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..675c5fdf51549274d5cb0da16ab95e8d949622b3 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/CSourceRegistrationGsonAdapter.java @@ -0,0 +1,318 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.net.URI; +import java.net.URISyntaxException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Date; +import java.util.Iterator; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; + +import javax.sql.rowset.serial.SerialException; + +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import 
com.google.gson.JsonParseException; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.Information; +import eu.neclab.ngsildbroker.commons.datatypes.TimeInterval; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +// TODO : complete serializer - include other fields also i.e. location,name etc. +public class CSourceRegistrationGsonAdapter + implements JsonDeserializer, JsonSerializer { + + + + @Override + public JsonElement serialize(CSourceRegistration src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + top.add(NGSIConstants.JSON_LD_ID, context.serialize(src.getId().toString())); + + JsonArray jsonArray = new JsonArray(); + jsonArray.add(src.getType()); + top.add(NGSIConstants.JSON_LD_TYPE, jsonArray); + + jsonArray = new JsonArray(); + JsonObject jsonObject = new JsonObject(); + jsonObject.add(NGSIConstants.JSON_LD_VALUE, context.serialize(src.getEndpoint().toString())); + jsonArray.add(jsonObject); + top.add(NGSIConstants.NGSI_LD_ENDPOINT, jsonArray); + + jsonArray = new JsonArray(); + jsonObject = new JsonObject(); + jsonObject.add(NGSIConstants.JSON_LD_VALUE, context.serialize(src.isInternal())); + jsonArray.add(jsonObject); + top.add(NGSIConstants.NGSI_LD_INTERNAL, jsonArray); + + jsonArray = new JsonArray(); + if (src.getInformation() != null) { + for (Information info : src.getInformation()) { + JsonObject infoObject = new JsonObject(); + Set properties = info.getProperties(); + Set relationsships = info.getRelationships(); + List entities = info.getEntities(); + + JsonArray attribs = new JsonArray(); + for (String property : properties) { + JsonObject tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_ID, context.serialize(property)); + attribs.add(tempObj); + } + infoObject.add(NGSIConstants.NGSI_LD_PROPERTIES, attribs); + + attribs = new JsonArray(); + for (String relation : relationsships) { + JsonObject tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_ID, context.serialize(relation)); + attribs.add(tempObj); + } + infoObject.add(NGSIConstants.NGSI_LD_RELATIONSHIPS, attribs); + + attribs = new JsonArray(); + JsonArray tempArray = new JsonArray(); + if (entities != null) { + for (EntityInfo entityInfo : entities) { + JsonObject entityObj = new JsonObject(); + if (entityInfo.getId() != null) { + entityObj.add(NGSIConstants.JSON_LD_ID, context.serialize(entityInfo.getId().toString())); + } + if (entityInfo.getType() != null) { + JsonArray temp2 = new JsonArray(); + temp2.add(entityInfo.getType()); + entityObj.add(NGSIConstants.JSON_LD_TYPE, temp2); + } + if (entityInfo.getIdPattern() != null) { + JsonArray temp2 = new JsonArray(); + jsonObject = new JsonObject(); + jsonObject.add(NGSIConstants.JSON_LD_VALUE, context.serialize(entityInfo.getIdPattern())); + temp2.add(jsonObject); + entityObj.add(NGSIConstants.NGSI_LD_ID_PATTERN, temp2); + } + tempArray.add(entityObj); + } + if (tempArray.size() > 0) { + infoObject.add(NGSIConstants.NGSI_LD_ENTITIES, tempArray); + } + } + jsonArray.add(infoObject); + top.add(NGSIConstants.NGSI_LD_INFORMATION, jsonArray); + } + } + if (src.getTimestamp() != null) { + jsonArray = new JsonArray(); + JsonObject timestampObject = new JsonObject(); + if (src.getTimestamp().getStart() != 
null) { + + timestampObject.add(NGSIConstants.NGSI_LD_TIMESTAMP_START, SerializationTools.getJson(src.getTimestamp().getStart(), context)); + } + if (src.getTimestamp().getStop() != null) { + + timestampObject.add(NGSIConstants.NGSI_LD_TIMESTAMP_END, SerializationTools.getJson(src.getTimestamp().getStop(), context)); + } + jsonArray.add(timestampObject); + + top.add(NGSIConstants.NGSI_LD_TIME_STAMP, jsonArray); + } + + if (src.getLocation() != null) { + jsonArray = new JsonArray(); + jsonObject = new JsonObject(); + jsonObject.add(NGSIConstants.JSON_LD_VALUE, SerializationTools.getJson(src.getLocation())); + jsonArray.add(jsonObject); + top.add(NGSIConstants.NGSI_LD_LOCATION, jsonArray); + } + + if(src.getExpires()!=null) { + top.add(NGSIConstants.NGSI_LD_EXPIRES, SerializationTools.getJson(src.getExpires(), context)); + } + + return top; + } + + @Override + public CSourceRegistration deserialize(JsonElement json, Type type, JsonDeserializationContext context) + throws JsonParseException { + + JsonObject top = json.getAsJsonObject(); + CSourceRegistration result = new CSourceRegistration(); + + for (Entry entry : top.entrySet()) { + String key = entry.getKey(); + JsonElement value = entry.getValue(); + if (key.equals(NGSIConstants.JSON_LD_ID)) { + try { + result.setId(new URI(value.getAsString())); + } catch (URISyntaxException e) { + throw new JsonParseException("Invalid Id " + value.getAsString()); + } + } else if (key.equals(NGSIConstants.JSON_LD_TYPE)) { + result.setType(value.getAsString()); + } else if (key.equals(NGSIConstants.NGSI_LD_INTERNAL)) { + result.setInternal(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsBoolean()); + } else if (key.equals(NGSIConstants.NGSI_LD_ENDPOINT)) { + try { + result.setEndpoint(new URI(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString())); + } catch (URISyntaxException e) { + throw new JsonParseException("Invalid endpoint uri " + value.getAsString()); + } + } else if (key.equals(NGSIConstants.NGSI_LD_INFORMATION)) { + List information = new ArrayList(); + JsonArray jsonEntities = value.getAsJsonArray(); + Iterator it = jsonEntities.iterator(); + while (it.hasNext()) { + Information info = new Information(); + List entities = info.getEntities(); + Set properties = info.getProperties(); + Set relationships = info.getRelationships(); + information.add(info); + JsonObject obj = it.next().getAsJsonObject(); + if (obj.has(NGSIConstants.NGSI_LD_ENTITIES)) { + Iterator entityIterator = obj.get(NGSIConstants.NGSI_LD_ENTITIES).getAsJsonArray() + .iterator(); + while (entityIterator.hasNext()) { + EntityInfo entityInfo = new EntityInfo(); + JsonObject entityObject = entityIterator.next().getAsJsonObject(); + if (entityObject.has(NGSIConstants.JSON_LD_ID)) { + try { + entityInfo.setId(new URI(entityObject.get(NGSIConstants.JSON_LD_ID).getAsString())); + } catch (URISyntaxException e) { + // TODO Check whether URI in EntityInfo for ID is correct. 
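+ // The exception is swallowed here, so a malformed @id simply leaves the EntityInfo id unset instead of aborting the whole registration parse.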
+ } + } + if (entityObject.has(NGSIConstants.JSON_LD_TYPE)) { + entityInfo.setType(entityObject.get(NGSIConstants.JSON_LD_TYPE).getAsString()); + } + if (entityObject.has(NGSIConstants.NGSI_LD_ID_PATTERN)) { + entityInfo.setIdPattern( + entityObject.get(NGSIConstants.NGSI_LD_ID_PATTERN).getAsJsonArray().get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } + entities.add(entityInfo); + } + } + if (obj.has(NGSIConstants.NGSI_LD_RELATIONSHIPS)) { + Iterator attribs = obj.get(NGSIConstants.NGSI_LD_RELATIONSHIPS).getAsJsonArray() + .iterator(); + while (attribs.hasNext()) { + relationships + .add(attribs.next().getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString()); + } + } + if (obj.has(NGSIConstants.NGSI_LD_PROPERTIES)) { + Iterator attribs = obj.get(NGSIConstants.NGSI_LD_PROPERTIES).getAsJsonArray() + .iterator(); + while (attribs.hasNext()) { + properties + .add(attribs.next().getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString()); + } + } + } + result.setInformation(information); + } else if (key.equals(NGSIConstants.NGSI_LD_LOCATION)) { + String geoValue = value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + result.setLocation( DataSerializer.getGeojsonGeometry(geoValue) ); + } else if (key.equals(NGSIConstants.NGSI_LD_TIME_STAMP)) { + result.setTimestamp(new TimeInterval()); + JsonObject timestampObject = value.getAsJsonArray().get(0).getAsJsonObject(); + if (timestampObject.has(NGSIConstants.NGSI_LD_TIMESTAMP_START)) { + String dateTime = timestampObject.get(NGSIConstants.NGSI_LD_TIMESTAMP_START).getAsJsonArray().get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString(); + try { + result.getTimestamp().setStart(SerializationTools.date2Long(dateTime)); + } catch (Exception e) { + throw new JsonParseException(e.getMessage()); + } + } + if (timestampObject.has(NGSIConstants.NGSI_LD_TIMESTAMP_END)) { + String dateTime = timestampObject.get(NGSIConstants.NGSI_LD_TIMESTAMP_END).getAsJsonArray().get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString(); + try { + result.getTimestamp().setStop(SerializationTools.date2Long(dateTime)); + } catch (Exception e) { + throw new JsonParseException(e.getMessage()); + } + } + }else if(key.equals(NGSIConstants.NGSI_LD_EXPIRES)) { + String expires=value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString(); + try { + result.setExpires(SerializationTools.date2Long(expires)); + } catch (Exception e) { + throw new JsonParseException(e.getMessage()); + } + } + } + return result; + } + + // private JsonArray parseGeoLocation(JsonNode locationGeoJson) { + // String type = locationGeoJson.get(NGSIConstants.CSOURCE_TYPE).asText(); + // JsonArray jsonArray = new JsonArray(); + // JsonObject jsonObject = new JsonObject(); + // JsonArray typeArray = new JsonArray(); + // typeArray.add(type); + // jsonObject.add(NGSIConstants.JSON_LD_TYPE, typeArray); + // + // JsonArray coordinatesArray = new JsonArray(); + // JsonNode coordinatesJson = + // locationGeoJson.get(NGSIConstants.CSOURCE_COORDINATES); + // if (type.equals(NGSIConstants.GEO_TYPE_POINT)) { + // if (coordinatesJson.isArray()) { + // for (final JsonNode objNode : coordinatesJson) { + // JsonObject tempJsonObject = new JsonObject(); + // tempJsonObject.addProperty(NGSIConstants.JSON_LD_VALUE, objNode.asDouble()); + // coordinatesArray.add(tempJsonObject); + // } + // } + // jsonObject.add(NGSIConstants.NGSI_LD_COORDINATES, coordinatesArray); + // } 
else if (type.equals(NGSIConstants.GEO_TYPE_POLYGON)) { + // coordinatesJson = coordinatesJson.get(0); + // if (coordinatesJson.isArray()) { + // Iterator it = coordinatesJson.iterator(); + // while (it.hasNext()) { + // JsonNode node = (JsonNode) it.next(); + // for (final JsonNode objNode : node) { + // JsonObject tempJsonObject = new JsonObject(); + // tempJsonObject.addProperty(NGSIConstants.JSON_LD_VALUE, objNode.asDouble()); + // coordinatesArray.add(tempJsonObject); + // } + // } + // } + // jsonObject.add(NGSIConstants.NGSI_LD_COORDINATES, coordinatesArray); + // } else if (type.equals(NGSIConstants.GEO_TYPE_LINESTRING)) { + // coordinatesJson = coordinatesJson.get(0); + // if (coordinatesJson.isArray()) { + // Iterator it = coordinatesJson.iterator(); + // while (it.hasNext()) { + // JsonNode node = (JsonNode) it.next(); + // for (final JsonNode objNode : node) { + // JsonObject tempJsonObject = new JsonObject(); + // tempJsonObject.addProperty(NGSIConstants.JSON_LD_VALUE, objNode.asDouble()); + // coordinatesArray.add(tempJsonObject); + // } + // } + // } + // jsonObject.add(NGSIConstants.NGSI_LD_COORDINATES, coordinatesArray); + // } + // jsonArray.add(jsonObject); + // return jsonArray; + // } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/DataSerializer.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/DataSerializer.java new file mode 100644 index 0000000000000000000000000000000000000000..9d4fe6e3c1f700366efaf8a05501c82e286d2e81 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/DataSerializer.java @@ -0,0 +1,165 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.List; + +import com.github.filosganga.geogson.gson.GeometryAdapterFactory; +import com.github.filosganga.geogson.jts.JtsAdapterFactory; +import com.github.filosganga.geogson.model.Geometry; +import com.github.filosganga.geogson.model.MultiPolygon; +import com.google.common.reflect.TypeToken; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +import eu.neclab.ngsildbroker.commons.datatypes.BatchResult; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.GeoValue; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.datatypes.TemporalEntityStorageKey; +import eu.neclab.ngsildbroker.commons.datatypes.TypedValue; + +public class DataSerializer { + + // private static final Type INDEX_SET_TYPE = new TypeToken>() { + // }.getType(); + private static final Gson GSON = createGsonObject(); + private static final Gson SPECIAL_GSON = createSpecialGsonObject(); + + // private static final Type propertiesType = new TypeToken>() { + // + // }.getType(); + + public static void main(String[] args) { + Entity entity = getEntity("{\r\n" + " \"http://schema.org/vehicle/brandName\": [\r\n" + " {\r\n" + + " \"@type\": [\r\n" + " \"http://schema.org/ngsi-ld/Property\"\r\n" + " ],\r\n" + + " \"http://schema.org/ngsi-ld/hasValue\": [\r\n" + " {\r\n" + + " \"@value\": 
\"Mercedes\"\r\n" + " }\r\n" + " ]\r\n" + " }\r\n" + + " ],\r\n" + " \"@id\": \"urn:ngsi-ld:Vehicle:A4010\",\r\n" + + " \"http://schema.org/common/isParked\": [\r\n" + " {\r\n" + + " \"http://schema.org/ngsi-ld/observedAt\": [\r\n" + " {\r\n" + + " \"@type\": \"http://schema.org/ngsi-ld/DateTime\",\r\n" + + " \"@value\": \"2017-07-29T12:00:04\"\r\n" + " }\r\n" + " ],\r\n" + + " \"http://schema.org/common/providedBy\": [\r\n" + " {\r\n" + + " \"http://schema.org/ngsi-ld/hasObject\": [\r\n" + " {\r\n" + + " \"@id\": \"urn:ngsi-ld:Person:Bob\"\r\n" + " }\r\n" + + " ],\r\n" + " \"@type\": [\r\n" + + " \"http://schema.org/ngsi-ld/Relationship\"\r\n" + " ]\r\n" + + " }\r\n" + " ],\r\n" + " \"@type\": [\r\n" + + " \"http://schema.org/ngsi-ld/Relationship\"\r\n" + " ],\r\n" + + " \"http://schema.org/ngsi-ld/hasValue\": [\r\n" + " {\r\n" + + " \"@value\": \"urn:ngsi-ld:OffStreetParking:Downtown1\"\r\n" + " }\r\n" + + " ]\r\n" + " }\r\n" + " ],\r\n" + " \"@type\": [\r\n" + + " \"http://schema.org/vehicle/Vehicle\"\r\n" + " ]\r\n" + "}"); + System.out.println("Entity :: " + entity); + /* + * for(Property p:entity.getProperties()) { + * System.out.println("P : "+p.getValue()); } + */ + System.out.println("Json simplified ::" + toJson(entity)); + } + + private DataSerializer() { + // Do nothing. (prevent instantiation) + } + + private static Gson createGsonObject() { + GsonBuilder builder = new GsonBuilder(); + registerTypes(builder); + return builder.setPrettyPrinting().create(); + } + + private static Gson createSpecialGsonObject() { + GsonBuilder builder = new GsonBuilder(); + builder.registerTypeAdapter(Entity.class, new EntityGsonAdapter(true)); + return builder.setPrettyPrinting().create(); + } + + private static void registerTypes(GsonBuilder builder) { + // Index metadata + + builder.registerTypeAdapter(Entity.class, new EntityGsonAdapter(false)); + builder.registerTypeAdapter(Subscription.class, new SubscriptionGsonAdapter()); + builder.registerTypeAdapter(CSourceRegistration.class, new CSourceRegistrationGsonAdapter()); + builder.registerTypeAdapter(GeoValue.class, new GeoValueGsonAdapter()); + builder.registerTypeAdapter(BatchResult.class, new BatchResultGsonAdapter()); + builder.registerTypeAdapterFactory(new GeometryAdapterFactory()); + builder.registerTypeAdapterFactory(new JtsAdapterFactory()); + builder.registerTypeAdapter(Notification.class, new NotificationGsonAdapter()); + builder.registerTypeAdapter(TypedValue.class, new TypedValueGsonAdapter()); + builder.registerTypeAdapter(SerializationTypes.entitiesType, new EntitiesGsonAdapter()); + // builder.registerTypeAdapter(propertiesType, new PropertiesGsonAdapter()); + } + + public static List getEntities(String json) { + return GSON.fromJson(json, SerializationTypes.entitiesType); + } + + public static List getEntities(InputStreamReader in) { + return GSON.fromJson(in, SerializationTypes.entitiesType); + } + + public static Entity getEntity(String json) { + return GSON.fromJson(json, Entity.class); + } + + public static Subscription getSubscription(String json) { + return GSON.fromJson(json, Subscription.class); + } + + public static SubscriptionRequest getSubscriptionRequest(String json) { + return GSON.fromJson(json, SubscriptionRequest.class); + } + + public static Notification getNotification(String json) { + return GSON.fromJson(json, Notification.class); + } + + public static CSourceRegistration getCSourceRegistration(String json) { + return GSON.fromJson(json, CSourceRegistration.class); + } + + public static GeoValue 
getGeoValue(String json) { + return GSON.fromJson(json, GeoValue.class); + } + + public static Geometry getGeojsonGeometry(String json) { + return GSON.fromJson(json, Geometry.class); + } + + // public static List getProperties(String json){ + // return GSON.fromJson(json, propertiesType); + // } + + // get collection of entities from json + public static List getCSourceRegistrations(String json, Type type) { + return GSON.fromJson(json, type); + } + + public static String toJson(Object obj) { + return GSON.toJson(obj); + } + + public static Entity getPartialEntity(String json) { + return SPECIAL_GSON.fromJson(json, Entity.class); + } + + public static QueryParams getQueryParams(String json) { + return GSON.fromJson(json, QueryParams.class); + } + + public static ArrayList getStringList(String json) { + return GSON.fromJson(json, ArrayList.class); + } + + public static TemporalEntityStorageKey getTemporalEntityStorageKey(String json) { + return GSON.fromJson(json, TemporalEntityStorageKey.class); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/EntitiesGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/EntitiesGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..a48a807cf91cf5034728fd00894e593ef6c79657 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/EntitiesGsonAdapter.java @@ -0,0 +1,39 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.List; + +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonParseException; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; + +public class EntitiesGsonAdapter implements JsonSerializer>, JsonDeserializer>{ + + @Override + public List deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) + throws JsonParseException { + JsonArray top = json.getAsJsonArray(); + ArrayList result = new ArrayList(top.size()); + for(JsonElement element: top) { + result.add(context.deserialize(element, SerializationTypes.entityType)); + } + return result; + } + + @Override + public JsonElement serialize(List src, Type typeOfSrc, JsonSerializationContext context) { + JsonArray top = new JsonArray(); + for(Entity entity: src) { + top.add(context.serialize(entity)); + } + return top; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/EntityGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/EntityGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..c5c2a0560580684fa0adc9d97923f32c4cc00941 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/EntityGsonAdapter.java @@ -0,0 +1,198 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Map.Entry; + +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import 
com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.GeoProperty; +import eu.neclab.ngsildbroker.commons.datatypes.Property; +import eu.neclab.ngsildbroker.commons.datatypes.Relationship; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +public class EntityGsonAdapter implements JsonDeserializer, JsonSerializer { + + private boolean allowIncomplete; + + public EntityGsonAdapter(boolean allowIncomplete) { + this.allowIncomplete = allowIncomplete; + } + + @Override + public JsonElement serialize(Entity entity, Type type, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + top.add(NGSIConstants.JSON_LD_ID, context.serialize(entity.getId())); + top.add(NGSIConstants.JSON_LD_TYPE, context.serialize(entity.getType())); + if (entity.getProperties() != null) { + for (Property property : entity.getProperties()) { + top.add(property.getId().toString(), SerializationTools.getJson(property, context)); + } + } + if (entity.getRelationships() != null) { + for (Relationship relationship : entity.getRelationships()) { + top.add(relationship.getId().toString(), SerializationTools.getJson(relationship, context)); + } + } + if (entity.getGeoProperties() != null) { + for (GeoProperty geoProperty : entity.getGeoProperties()) { + top.add(geoProperty.getId().toString(), SerializationTools.getJson(geoProperty, context)); + } + } + if (entity.getCreatedAt() != null && entity.getCreatedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_CREATED_AT, SerializationTools.getJson(entity.getCreatedAt(), context)); + } + if (entity.getModifiedAt() != null && entity.getModifiedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_MODIFIED_AT, SerializationTools.getJson(entity.getModifiedAt(), context)); + } + if (entity.getLocation() != null) { + top.add(entity.getLocation().getId().toString(), SerializationTools.getJson(entity.getLocation(), context)); + } + if (entity.getObservationSpace() != null) { + top.add(entity.getObservationSpace().getId().toString(), + SerializationTools.getJson(entity.getObservationSpace(), context)); + } + if (entity.getOperationSpace() != null) { + top.add(entity.getOperationSpace().getId().toString(), + SerializationTools.getJson(entity.getOperationSpace(), context)); + } + if (entity.getName() != null) { + top.add(NGSIConstants.NGSI_LD_NAME, new JsonPrimitive(entity.getName())); + } + return top; + } + + @Override + public Entity deserialize(JsonElement json, Type classType, JsonDeserializationContext context) + throws JsonParseException { + JsonObject top = json.getAsJsonObject(); + URI id = null; + String type = null; + String name = null; + GeoProperty location = null; + GeoProperty observationSpace = null; + GeoProperty operationSpace = null; + ArrayList properties = new ArrayList(); + ArrayList relationships = new ArrayList(); + ArrayList geoproperties = new ArrayList(); + Long createdAt = null, observedAt = null, modifiedAt = null; + String refToAccessControl = null; + for (Entry entry : top.entrySet()) { + String key = entry.getKey(); + JsonObject objValue = null; + switch (key) { + case NGSIConstants.JSON_LD_ID: + try { + id = new URI(entry.getValue().getAsString()); + } catch (URISyntaxException e1) { + throw new JsonParseException("ID field is 
not a valid URI"); + } + break; + case NGSIConstants.JSON_LD_TYPE: + type = entry.getValue().getAsString(); + break; + case NGSIConstants.NGSI_LD_LOCATION: + location = SerializationTools.parseGeoProperty(entry.getValue().getAsJsonArray(), + NGSIConstants.NGSI_LD_LOCATION); + break; + case NGSIConstants.NGSI_LD_OPERATION_SPACE: + operationSpace = SerializationTools.parseGeoProperty(entry.getValue().getAsJsonArray(), + NGSIConstants.NGSI_LD_OPERATION_SPACE); + break; + case NGSIConstants.NGSI_LD_OBSERVATION_SPACE: + observationSpace = SerializationTools.parseGeoProperty(entry.getValue().getAsJsonArray(), + NGSIConstants.NGSI_LD_OBSERVATION_SPACE); + break; + case NGSIConstants.NGSI_LD_CREATED_AT: + try { + createdAt = SerializationTools.date2Long(entry.getValue().getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + break; + case NGSIConstants.NGSI_LD_MODIFIED_AT: + try { + modifiedAt = SerializationTools.date2Long(entry.getValue().getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + break; + case NGSIConstants.NGSI_LD_OBSERVED_AT: + try { + observedAt = SerializationTools.date2Long(entry.getValue().getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + break; + case NGSIConstants.NGSI_LD_NAME: + if (!entry.getValue().getAsJsonArray().get(0).getAsJsonObject().has(NGSIConstants.JSON_LD_TYPE)) { + name = entry.getValue().getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE) + .getAsString(); + break; + } + default: + + JsonArray topLevelArray = entry.getValue().getAsJsonArray(); + objValue = topLevelArray.get(0).getAsJsonObject(); + + if (objValue.has(NGSIConstants.JSON_LD_TYPE)) { + String valueType = objValue.get(NGSIConstants.JSON_LD_TYPE).getAsJsonArray().get(0).getAsString(); + if (valueType.equals(NGSIConstants.NGSI_LD_PROPERTY)) { + Property property = SerializationTools.parseProperty(topLevelArray, key); + properties.add(property); + } else if (valueType.equals(NGSIConstants.NGSI_LD_RELATIONSHIP)) { + Relationship relationship = SerializationTools.parseRelationship(topLevelArray, key); + relationships.add(relationship); + } else if (valueType.equals(NGSIConstants.NGSI_LD_GEOPROPERTY)) { + GeoProperty geoproperty = SerializationTools.parseGeoProperty(topLevelArray, key); + geoproperties.add(geoproperty); + } else { + throw new JsonParseException("Unknown top level entry provided " + key); + } + } else { + throw new JsonParseException("Unknown top level entry provided " + key); + } + + break; + } + + } + if (id == null && !allowIncomplete) { + throw new JsonParseException("ID field is mandatory"); + } + if (type == null && !allowIncomplete) { + throw new JsonParseException("Type field is mandatory"); + } + Entity result = new Entity(id, location, observationSpace, operationSpace, properties, refToAccessControl, + relationships, type, geoproperties); + if (createdAt != null) { + result.setCreatedAt(createdAt); + } + if (modifiedAt != null) { + result.setModifiedAt(modifiedAt); + } + if (observedAt != null) { + result.setObservedAt(observedAt); + } + if (name != null) { + result.setName(name); + } + return result; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/GeoValueGsonAdapter.java 
b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/GeoValueGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..5de01d41bf762d9690d19b09c91ba87791bb34f6 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/GeoValueGsonAdapter.java @@ -0,0 +1,75 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; +import com.google.gson.reflect.TypeToken; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoValue; + +public class GeoValueGsonAdapter implements JsonDeserializer, JsonSerializer { + + @Override + public JsonElement serialize(GeoValue src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + top.add(NGSIConstants.GEO_JSON_TYPE, new JsonPrimitive(src.getType())); + JsonElement coordinates = new Gson().toJsonTree(src.getCoordinates(), new TypeToken>() {}.getType()); + top.add(NGSIConstants.GEO_JSON_COORDINATES, coordinates); + // must return the strigified version + JsonPrimitive stringified = new JsonPrimitive(top.toString()); + return stringified; + } + + @Override + public GeoValue deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) + throws JsonParseException { + JsonObject top = json.getAsJsonObject(); + GeoValue value = new GeoValue(); + value.setType(top.get(NGSIConstants.GEO_JSON_TYPE).getAsString()); + JsonArray jsonCoordinates; + switch (value.getType()) { + case NGSIConstants.GEO_TYPE_POINT: + jsonCoordinates = top.get(NGSIConstants.GEO_JSON_COORDINATES).getAsJsonArray(); + break; + case NGSIConstants.GEO_TYPE_LINESTRING: + jsonCoordinates = top.get(NGSIConstants.GEO_JSON_COORDINATES).getAsJsonArray().get(0).getAsJsonArray(); + break; + case NGSIConstants.GEO_TYPE_POLYGON: + jsonCoordinates = top.get(NGSIConstants.GEO_JSON_COORDINATES).getAsJsonArray().get(0).getAsJsonArray() + .get(0).getAsJsonArray(); + break; + default: + throw new JsonParseException("Unexpected GeoJson type"); + } + + ArrayList coordinates = new ArrayList(); + Iterator it = jsonCoordinates.iterator(); + while (it.hasNext()) { + JsonElement element = it.next(); + if(element.isJsonArray()) { + Iterator it2 = element.getAsJsonArray().iterator(); + while(it2.hasNext()) { + coordinates.add(it2.next().getAsDouble()); + } + }else { + coordinates.add(element.getAsDouble()); + } + } + value.setCoordinates(coordinates); + return value; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/NotificationGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/NotificationGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..36675788f8c999ce885896bb5cf50f903d7e508a --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/NotificationGsonAdapter.java @@ -0,0 +1,88 @@ +package 
eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Date; +import java.util.List; +import java.util.Map.Entry; + +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +public class NotificationGsonAdapter implements JsonSerializer, JsonDeserializer{ + + @Override + public JsonElement serialize(Notification src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + JsonArray temp = new JsonArray(); + top.add(NGSIConstants.JSON_LD_ID, new JsonPrimitive(src.getId().toString())); + temp.add(src.getType()); + top.add(NGSIConstants.JSON_LD_TYPE, temp); + temp = new JsonArray(); + JsonObject tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_ID, new JsonPrimitive(src.getSubscriptionId().toString())); + temp.add(tempObj); + top.add(NGSIConstants.NGSI_LD_SUBSCRIPTION_ID, temp); + + top.add(NGSIConstants.NGSI_LD_NOTIFIED_AT, SerializationTools.getJson(src.getNotifiedAt(), context)); + + top.add(NGSIConstants.NGSI_LD_DATA, context.serialize(src.getData(),SerializationTypes.entitiesType)); + return top; + } + + @Override + public Notification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) + throws JsonParseException { + JsonObject top = json.getAsJsonObject(); + URI id = null; + Long notifiedAt = null; + URI subscriptionId = null; + List data = null; + for(Entry entry: top.entrySet()) { + String key = entry.getKey(); + JsonElement value = entry.getValue(); + if(NGSIConstants.JSON_LD_ID.equals(key)) { + try { + id = new URI(value.getAsString()); + } catch (URISyntaxException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }else if(NGSIConstants.NGSI_LD_SUBSCRIPTION_ID.equals(key)) { + try { + subscriptionId = new URI(value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString()); + } catch (URISyntaxException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }else if(NGSIConstants.NGSI_LD_NOTIFIED_AT.equals(key)) { + try { + notifiedAt = SerializationTools.date2Long(value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }else if(NGSIConstants.NGSI_LD_DATA.equals(key)) { + data = context.deserialize(value, SerializationTypes.entitiesType); + } + } + if(id == null || data == null || notifiedAt == null || subscriptionId == null) { + throw new JsonParseException("Missing field in notification"); + } + return new Notification(id, notifiedAt, subscriptionId, data); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/PropertiesGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/PropertiesGsonAdapter.java new file mode 100644 index 
0000000000000000000000000000000000000000..2a57a1df45da50722a7c18f46216c359edfb6cd7 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/PropertiesGsonAdapter.java @@ -0,0 +1,44 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.List; +import java.util.Map.Entry; +import java.util.Set; + +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +import eu.neclab.ngsildbroker.commons.datatypes.Property; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +public class PropertiesGsonAdapter implements JsonDeserializer>, JsonSerializer>{ + + + @Override + public JsonElement serialize(List src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + for(Property property: src) { + top.add(property.getId().toString(), SerializationTools.getJson(property, context)); + } + return top; + } + + @Override + public List deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) + throws JsonParseException { + ArrayList result = new ArrayList(); + JsonObject top = json.getAsJsonObject(); + Set> jsonProperties = top.entrySet(); + for(Entry entry: jsonProperties) { + result.add(SerializationTools.parseProperty(entry.getValue().getAsJsonArray(), entry.getKey())); + } + return result; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/SerializationTypes.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/SerializationTypes.java new file mode 100644 index 0000000000000000000000000000000000000000..a21306ac33377de60306400e20d60a60d0928cc2 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/SerializationTypes.java @@ -0,0 +1,15 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.util.List; + +import com.google.common.reflect.TypeToken; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; + +public class SerializationTypes { + public static final Type entitiesType = new TypeToken>() { + }.getType(); + public static final Type entityType = new TypeToken() { + }.getType(); +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/SubscriptionGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/SubscriptionGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..a7b03ac2cc0c4657c29dfeaab035a6a2fb4aa7ee --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/SubscriptionGsonAdapter.java @@ -0,0 +1,462 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; +import java.net.URI; +import java.net.URISyntaxException; +import java.time.Instant; +import java.time.temporal.TemporalAccessor; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Map.Entry; +import com.google.gson.JsonArray; +import com.google.gson.JsonDeserializationContext; +import com.google.gson.JsonDeserializer; +import 
com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.EndPoint; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.GeoRelation; +import eu.neclab.ngsildbroker.commons.datatypes.LDGeoQuery; +import eu.neclab.ngsildbroker.commons.datatypes.NotificationParam; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.enums.Format; +import eu.neclab.ngsildbroker.commons.enums.Geometry; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +public class SubscriptionGsonAdapter implements JsonDeserializer, JsonSerializer { + + @Override + public Subscription deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) + throws JsonParseException { + JsonObject top; + if (json.isJsonArray()) { + top = json.getAsJsonArray().get(0).getAsJsonObject(); + } else { + top = json.getAsJsonObject(); + } + Subscription result = new Subscription(); + for (Entry entry : top.entrySet()) { + String key = entry.getKey(); + JsonElement value = entry.getValue(); + if (key.equals(NGSIConstants.JSON_LD_ID)) { + try { + result.setId(new URI(value.getAsString())); + } catch (URISyntaxException e) { + throw new JsonParseException("Invalid Id " + value.getAsString()); + } + } else if (key.equals(NGSIConstants.JSON_LD_TYPE)) { + result.setType(value.getAsString()); + } else if (key.equals(NGSIConstants.NGSI_LD_ENTITIES)) { + JsonArray entities = value.getAsJsonArray(); + Iterator it = entities.iterator(); + while (it.hasNext()) { + JsonObject obj = it.next().getAsJsonObject(); + EntityInfo entity = new EntityInfo(); + if (obj.has(NGSIConstants.NGSI_LD_ID_PATTERN)) { + entity.setIdPattern(obj.get(NGSIConstants.NGSI_LD_ID_PATTERN).getAsJsonArray().get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } + if (obj.has(NGSIConstants.JSON_LD_ID)) { + try { + entity.setId(new URI(obj.get(NGSIConstants.JSON_LD_ID).getAsString())); + } catch (URISyntaxException e) { + throw new JsonParseException( + "Invalid Id to subscribe to " + obj.get(NGSIConstants.JSON_LD_ID).getAsString()); + } + } + if (obj.has(NGSIConstants.JSON_LD_TYPE)) { + entity.setType(obj.get(NGSIConstants.JSON_LD_TYPE).getAsJsonArray().get(0).getAsString()); + } else { + throw new JsonParseException("type is a mandatory field in all entries of entities"); + } + result.addEntityInfo(entity); + } + + } else if (key.equals(NGSIConstants.NGSI_LD_GEO_QUERY)) { + JsonObject query = value.getAsJsonArray().get(0).getAsJsonObject(); + LDGeoQuery geoQuery = new LDGeoQuery(); + Iterator jsonCoordinates = query.getAsJsonArray(NGSIConstants.NGSI_LD_COORDINATES) + .iterator(); + ArrayList coordinates = new ArrayList(); + while (jsonCoordinates.hasNext()) { + coordinates.add( + jsonCoordinates.next().getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsDouble()); + } + geoQuery.setCoordinates(coordinates); + String geometry = query.getAsJsonArray(NGSIConstants.NGSI_LD_GEOMETRY).get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + if (geometry.equalsIgnoreCase("point")) { + geoQuery.setGeometry(Geometry.Point); + } else if 
(geometry.equalsIgnoreCase("polygon")) { + geoQuery.setGeometry(Geometry.Polygon); + } + String geoRelString = query.getAsJsonArray(NGSIConstants.NGSI_LD_GEO_REL).get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + String[] relSplit = geoRelString.split(";"); + GeoRelation geoRel = new GeoRelation(); + geoRel.setRelation(relSplit[0]); + for (int i = 1; i < relSplit.length; i++) { + String[] temp = relSplit[i].split("=="); + Object distance; + try { + distance = Integer.parseInt(temp[1]); + } catch (NumberFormatException e) { + distance = Double.parseDouble(temp[1]); + } + if (temp[0].equalsIgnoreCase("maxDistance")) { + + geoRel.setMaxDistance(distance); + } else if (temp[0].equalsIgnoreCase("minDistance")) { + geoRel.setMinDistance(distance); + } + } + geoQuery.setGeoRelation(geoRel); + result.setLdGeoQuery(geoQuery); + + } else if (key.equals(NGSIConstants.NGSI_LD_NOTIFICATION)) { + ArrayList watchedAttribs = new ArrayList(); + NotificationParam notifyParam = new NotificationParam(); + JsonObject ldObj = value.getAsJsonArray().get(0).getAsJsonObject(); + if (ldObj.has(NGSIConstants.NGSI_LD_ATTRIBUTES) + && ldObj.get(NGSIConstants.NGSI_LD_ATTRIBUTES).isJsonArray()) { + Iterator attribs = ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_ATTRIBUTES).iterator(); + while (attribs.hasNext()) { + watchedAttribs + .add(attribs.next().getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString()); + + } + } + notifyParam.setAttributeNames(watchedAttribs); + EndPoint endPoint = new EndPoint(); + JsonObject jsonEndPoint = ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_ENDPOINT).get(0).getAsJsonObject(); + if (jsonEndPoint.has(NGSIConstants.NGSI_LD_ACCEPT)) { + endPoint.setAccept(jsonEndPoint.getAsJsonArray(NGSIConstants.NGSI_LD_ACCEPT).get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } else { + endPoint.setAccept(AppConstants.NGB_APPLICATION_JSON); + } + try { + endPoint.setUri(new URI(jsonEndPoint.getAsJsonArray(NGSIConstants.NGSI_LD_URI).get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString())); + Map infoSettingNotifier = new HashMap(); + // add endpoint notification notifierInfo for deserialization + if (jsonEndPoint.has(NGSIConstants.NGSI_LD_NOTIFIERINFO) + && jsonEndPoint.get(NGSIConstants.NGSI_LD_NOTIFIERINFO).isJsonArray()) { + JsonObject info = jsonEndPoint.getAsJsonArray(NGSIConstants.NGSI_LD_NOTIFIERINFO).get(0).getAsJsonObject(); + String mqttQos = info.getAsJsonArray(NGSIConstants.NGSI_LD_MQTT_QOS).get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString(); + String mqttVersion = info.getAsJsonArray(NGSIConstants.NGSI_LD_MQTT_VERSION).get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString(); + infoSettingNotifier.put(NGSIConstants.MQTT_QOS, mqttQos); + infoSettingNotifier.put(NGSIConstants.MQTT_VERSION, mqttVersion); + endPoint.setNotifierInfo(infoSettingNotifier); + } + } catch (URISyntaxException e) { + throw new JsonParseException(e); + } + notifyParam.setEndPoint(endPoint); + if (ldObj.has(NGSIConstants.NGSI_LD_FORMAT) && ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_FORMAT).get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE) != null) { + String formatString = ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_FORMAT).get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + if (formatString.equalsIgnoreCase("keyvalues")) { + notifyParam.setFormat(Format.keyValues); + } else if (formatString.equalsIgnoreCase("normalized")) { + notifyParam.setFormat(Format.normalized); + } 
+ } else { + // Default + notifyParam.setFormat(Format.normalized); + } + if (ldObj.has(NGSIConstants.NGSI_LD_LAST_FAILURE)) { + TemporalAccessor temp = SerializationTools.formatter + .parse(ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_LAST_FAILURE).get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + notifyParam.setLastFailedNotification(new Date(Instant.from(temp).toEpochMilli())); + } + if (ldObj.has(NGSIConstants.NGSI_LD_LAST_SUCCESS)) { + TemporalAccessor temp = SerializationTools.formatter + .parse(ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_LAST_SUCCESS).get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + notifyParam.setLastNotification(new Date(Instant.from(temp).toEpochMilli())); + + } + if (ldObj.has(NGSIConstants.NGSI_LD_TIMES_SEND)) { + notifyParam.setTimesSent(ldObj.getAsJsonArray(NGSIConstants.NGSI_LD_TIMES_SEND).get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsInt()); + } + result.setNotification(notifyParam); + + } else if (key.equals(NGSIConstants.NGSI_LD_QUERY)) { + result.setLdQuery( + value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } else if (key.equals(NGSIConstants.NGSI_LD_WATCHED_ATTRIBUTES)) { + Iterator it = value.getAsJsonArray().iterator(); + ArrayList watched = new ArrayList(); + while (it.hasNext()) { + watched.add(it.next().getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString()); + } + result.setAttributeNames(watched); + } else if (key.equals(NGSIConstants.NGSI_LD_THROTTLING)) { + result.setThrottling( + value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsInt()); + } else if (key.equals(NGSIConstants.NGSI_LD_TIME_INTERVAL)) { + result.setTimeInterval( + value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsInt()); + } else if (key.equals(NGSIConstants.NGSI_LD_EXPIRES)) { + try { + result.setExpires(SerializationTools.date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString())); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (key.equals(NGSIConstants.NGSI_LD_STATUS)) { + result.setStatus( + value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } else if (key.equals(NGSIConstants.NGSI_LD_DESCRIPTION)) { + result.setDescription( + value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } + + } + if (result.getNotification() == null) { + throw new JsonParseException("no notification parameter provided"); + } + // if (result.getId() == null) { + // + // } + return result; + } + + @Override + public JsonElement serialize(Subscription src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + // remote subs have no id yet + if (src.getId() != null) { + top.add(NGSIConstants.JSON_LD_ID, context.serialize(src.getId().toString())); + } + top.add(NGSIConstants.JSON_LD_TYPE, context.serialize(src.getType())); + JsonArray temp = new JsonArray(); + if (src.getEntities() != null) { + for (EntityInfo info : src.getEntities()) { + JsonObject entityObj = new JsonObject(); + if (info.getId() != null) { + JsonArray temp2 = new JsonArray(); + temp2.add(info.getId().toString()); + entityObj.add(NGSIConstants.JSON_LD_ID, new JsonPrimitive(info.getId().toString()));// temp2); + } + if (info.getType() != null) { + JsonArray temp2 = new JsonArray(); + temp2.add(info.getType()); + 
entityObj.add(NGSIConstants.JSON_LD_TYPE, temp2); + } + if (info.getIdPattern() != null) { + JsonArray temp2 = new JsonArray(); + JsonObject tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize(info.getIdPattern())); + + temp2.add(tempObj); + entityObj.add(NGSIConstants.NGSI_LD_ID_PATTERN, temp2); + } + temp.add(entityObj); + } + if (temp.size() > 0) { + top.add(NGSIConstants.NGSI_LD_ENTITIES, temp); + } + } + if (src.getLdGeoQuery() != null) { + temp = new JsonArray(); + JsonObject geoObj = new JsonObject(); + JsonArray coordArray = new JsonArray(); + for (Double coordinate : src.getLdGeoQuery().getCoordinates()) { + JsonObject tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize(coordinate)); + coordArray.add(tempObj); + } + geoObj.add(NGSIConstants.NGSI_LD_COORDINATES, coordArray); + JsonArray temp2 = new JsonArray(); + JsonObject tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize(src.getLdGeoQuery().getGeometry().toString())); + temp2.add(tempObj); + geoObj.add(NGSIConstants.NGSI_LD_GEOMETRY, temp2); + if (src.getLdGeoQuery().getGeoRelation() != null) { + temp2 = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, + context.serialize(src.getLdGeoQuery().getGeoRelation().getABNFString())); + temp2.add(tempObj); + geoObj.add(NGSIConstants.NGSI_LD_GEO_REL, temp2); + } + temp.add(geoObj); + top.add(NGSIConstants.NGSI_LD_GEO_QUERY, temp); + } + temp = new JsonArray(); + JsonObject notificationObj = new JsonObject(); + JsonArray attribs = new JsonArray(); + JsonObject tempObj; + JsonArray tempArray; + if (src.getNotification() != null) { + NotificationParam notification = src.getNotification(); + if (notification.getAttributeNames() != null) { + for (String attrib : notification.getAttributeNames()) { + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_ID, context.serialize(attrib)); + attribs.add(tempObj); + } + notificationObj.add(NGSIConstants.NGSI_LD_ATTRIBUTES, attribs); + } + + JsonObject endPoint = new JsonObject(); + JsonArray endPointArray = new JsonArray(); + + if (notification.getEndPoint() != null) { + + if (notification.getEndPoint().getAccept() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize(notification.getEndPoint().getAccept())); + tempArray.add(tempObj); + endPoint.add(NGSIConstants.NGSI_LD_ACCEPT, tempArray); + } + if (notification.getEndPoint().getUri() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, + context.serialize(notification.getEndPoint().getUri().toString())); + tempArray.add(tempObj); + endPoint.add(NGSIConstants.NGSI_LD_URI, tempArray); + } + // add endpoint notification notifierInfo for serialization + if (notification.getEndPoint().getNotifierInfo() != null) { + JsonObject notifierEndPoint = new JsonObject(); + JsonArray notifierEndPointArray = new JsonArray(); + if (notification.getEndPoint().getNotifierInfo().get(NGSIConstants.MQTT_QOS) != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context + .serialize(notification.getEndPoint().getNotifierInfo().get(NGSIConstants.MQTT_QOS))); + tempArray.add(tempObj); + notifierEndPoint.add(NGSIConstants.NGSI_LD_MQTT_QOS, tempArray); + } + if (notification.getEndPoint().getNotifierInfo().get(NGSIConstants.MQTT_VERSION) != null) { + tempArray = new 
JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize( + notification.getEndPoint().getNotifierInfo().get(NGSIConstants.MQTT_VERSION))); + tempArray.add(tempObj); + notifierEndPoint.add(NGSIConstants.NGSI_LD_MQTT_VERSION, tempArray); + } + + notifierEndPointArray.add(notifierEndPoint); + endPoint.add(NGSIConstants.NGSI_LD_NOTIFIERINFO, notifierEndPointArray); + endPointArray.add(endPoint); + notificationObj.add(NGSIConstants.NGSI_LD_ENDPOINT, endPointArray); + } else { + endPointArray.add(endPoint); + notificationObj.add(NGSIConstants.NGSI_LD_ENDPOINT, endPointArray); + } + } + if (notification.getFormat() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize(notification.getFormat().toString())); + tempArray.add(tempObj); + notificationObj.add(NGSIConstants.NGSI_LD_FORMAT, tempArray); + } + if (notification.getLastFailedNotification() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize( + SerializationTools.formatter.format(notification.getLastFailedNotification().toInstant()))); + tempObj.add(NGSIConstants.JSON_LD_TYPE, context.serialize(NGSIConstants.NGSI_LD_DATE_TIME)); + tempArray.add(tempObj); + notificationObj.add(NGSIConstants.NGSI_LD_LAST_FAILURE, tempArray); + } + if (notification.getLastNotification() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize( + SerializationTools.formatter.format(notification.getLastNotification().toInstant()))); + tempObj.add(NGSIConstants.JSON_LD_TYPE, context.serialize(NGSIConstants.NGSI_LD_DATE_TIME)); + tempArray.add(tempObj); + notificationObj.add(NGSIConstants.NGSI_LD_LAST_NOTIFICATION, tempArray); + } + if (notification.getLastSuccessfulNotification() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize( + SerializationTools.formatter.format(notification.getLastSuccessfulNotification().toInstant()))); + tempObj.add(NGSIConstants.JSON_LD_TYPE, context.serialize(NGSIConstants.NGSI_LD_DATE_TIME)); + tempArray.add(tempObj); + notificationObj.add(NGSIConstants.NGSI_LD_LAST_SUCCESS, tempArray); + } + if (notification.getTimesSent() > 0) { + notificationObj.add(NGSIConstants.NGSI_LD_TIMES_SEND, + SerializationTools.getValueArray(notification.getTimesSent())); + } + // { + // "https://uri.etsi.org/ngsi-ld/lastSuccess": [ + // { + // "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + // "@value": "2020-04-04T12:03:04Z" + // } + // ] + // } + // + // { + // "https://uri.etsi.org/ngsi-ld/timesSent": [ + // { + // "@value": "2020-04-04T12:03:04Z" + // } + // ] + // } + temp.add(notificationObj); + top.add(NGSIConstants.NGSI_LD_NOTIFICATION, temp); + } + if (src.getLdQuery() != null) { + tempArray = new JsonArray(); + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_VALUE, context.serialize(src.getLdQuery())); + tempArray.add(tempObj); + top.add(NGSIConstants.NGSI_LD_QUERY, tempArray); + } + + attribs = new JsonArray(); + if (src.getAttributeNames() != null) { + for (String attrib : src.getAttributeNames()) { + tempObj = new JsonObject(); + tempObj.add(NGSIConstants.JSON_LD_ID, context.serialize(attrib)); + attribs.add(tempObj); + } + } + if (attribs.size() > 0) { + top.add(NGSIConstants.NGSI_LD_WATCHED_ATTRIBUTES, attribs); + } + if (src.getThrottling() != null && 
src.getTimeInterval() != 0) { + top.add(NGSIConstants.NGSI_LD_THROTTLING, SerializationTools.getValueArray(src.getThrottling())); + } + if (src.getTimeInterval() != null && src.getTimeInterval() != 0) { + top.add(NGSIConstants.NGSI_LD_TIME_INTERVAL, SerializationTools.getValueArray(src.getTimeInterval())); + } + if (src.getExpires() != null) { + top.add(NGSIConstants.NGSI_LD_EXPIRES, SerializationTools + .getValueArray(SerializationTools.formatter.format(Instant.ofEpochMilli(src.getExpires())))); + } + if (src.getStatus() != null) { + top.add(NGSIConstants.NGSI_LD_STATUS, SerializationTools.getValueArray(src.getStatus())); + } + if (src.getDescription() != null) { + top.add(NGSIConstants.NGSI_LD_DESCRIPTION, SerializationTools.getValueArray(src.getDescription())); + } + + return top; + } + +} \ No newline at end of file diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/TypedValueGsonAdapter.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/TypedValueGsonAdapter.java new file mode 100644 index 0000000000000000000000000000000000000000..f3122db262c6edfd22f0dc229735872829cd9b99 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/serialization/TypedValueGsonAdapter.java @@ -0,0 +1,25 @@ +package eu.neclab.ngsildbroker.commons.serialization; + +import java.lang.reflect.Type; + +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; +import com.google.gson.JsonSerializer; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.TypedValue; + +public class TypedValueGsonAdapter implements JsonSerializer { + + + @Override + public JsonElement serialize(TypedValue src, Type typeOfSrc, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + top.add(NGSIConstants.JSON_LD_TYPE, new JsonPrimitive(src.getType())); + top.add(NGSIConstants.JSON_LD_VALUE, context.serialize(src.getValue())); + return top; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/storage/StorageReaderDAO.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/storage/StorageReaderDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..3a4ce3e71e94020994fd6a1cfc2e3ed1958b6753 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/storage/StorageReaderDAO.java @@ -0,0 +1,262 @@ +package eu.neclab.ngsildbroker.commons.storage; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import javax.annotation.PostConstruct; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.support.rowset.SqlRowSet; +import org.springframework.util.ReflectionUtils; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +abstract public class StorageReaderDAO { 
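+ // Read-side base DAO: translateNgsildQueryToSql() turns NGSI-LD query parameters (id, idPattern, type, attrs, georel) into SQL, and query() runs the result through the injected readerJdbcTemplate.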
+ + private final static Logger logger = LogManager.getLogger(StorageReaderDAO.class); + + @Autowired + protected JdbcTemplate readerJdbcTemplate; + + @PostConstruct + public void init() { + readerJdbcTemplate.execute("SELECT 1"); // create connection pool and connect to database + } + + + public List query(QueryParams qp) { + + try { + String sqlQuery = translateNgsildQueryToSql(qp); + logger.info("NGSI-LD to SQL: " + sqlQuery); + //SqlRowSet result = readerJdbcTemplate.queryForRowSet(sqlQuery); + + return readerJdbcTemplate.queryForList(sqlQuery,String.class); + + + } catch(DataIntegrityViolationException e) { + //Empty result don't worry + logger.warn("SQL Result Exception::", e); + return new ArrayList(); + } catch (Exception e) { + logger.error("Exception ::", e); + } + return new ArrayList(); + + } + + public String getListAsJsonArray(List s) { + return "[" + String.join(",", s) + "]"; + } + + public List getLocalTypes() { + ArrayList result = new ArrayList(); + List> list = readerJdbcTemplate.queryForList( + "SELECT distinct type as type FROM entity WHERE type IS NOT NULL;"); + if(list == null ||list.isEmpty()) { + return null; + } + for (Map row : list) { + result.add(row.get("type").toString()); + } + return result; + } + + public List getAllTypes() { + ArrayList result = new ArrayList(); + List> list = readerJdbcTemplate.queryForList( + "SELECT distinct type as type FROM entity WHERE type IS NOT NULL UNION SELECT distinct entity_type as type FROM csourceinformation WHERE entity_type IS NOT NULL;"); + if(list == null ||list.isEmpty()) { + return null; + } + for (Map row : list) { + result.add(row.get("type").toString()); + } + return result; + } + + /* + * TODO: optimize sql queries by using prepared statements (if possible) + */ + protected String translateNgsildQueryToSql(QueryParams qp) throws ResponseException { + StringBuilder fullSqlWhereProperty = new StringBuilder(70); + + // https://stackoverflow.com/questions/3333974/how-to-loop-over-a-class-attributes-in-java + ReflectionUtils.doWithFields(qp.getClass(), field -> { + String dbColumn, sqlOperator; + String sqlWhereProperty = ""; + + field.setAccessible(true); + String queryParameter = field.getName(); + Object fieldValue = field.get(qp); + if (fieldValue != null) { + + logger.trace("Query parameter:" + queryParameter); + + String queryValue = ""; + if (fieldValue instanceof String) { + queryValue = fieldValue.toString(); + logger.trace("Query value: " + queryValue); + } + + switch (queryParameter) { + case NGSIConstants.QUERY_PARAMETER_IDPATTERN: + dbColumn = DBConstants.DBCOLUMN_ID; + sqlOperator = "~"; + sqlWhereProperty = dbColumn + " " + sqlOperator + " '" + queryValue + "'"; + break; + case NGSIConstants.QUERY_PARAMETER_TYPE: + case NGSIConstants.QUERY_PARAMETER_ID: + dbColumn = queryParameter; + if (queryValue.indexOf(",") == -1) { + sqlOperator = "="; + sqlWhereProperty = dbColumn + " " + sqlOperator + " '" + queryValue + "'"; + } else { + sqlOperator = "IN"; + sqlWhereProperty = dbColumn + " " + sqlOperator + " ('" + queryValue.replace(",", "','") + "')"; + } + break; + case NGSIConstants.QUERY_PARAMETER_ATTRS: + dbColumn = "data"; + sqlOperator = "?"; + if (queryValue.indexOf(",") == -1) { + sqlWhereProperty = dbColumn + " " + sqlOperator + "'" + queryValue + "'"; + } else { + sqlWhereProperty = "("+dbColumn + " " + sqlOperator + " '" + + queryValue.replace(",", "' OR " + dbColumn + " " + sqlOperator + "'") + "')"; + } + break; + case NGSIConstants.QUERY_PARAMETER_GEOREL: + if (fieldValue instanceof GeoqueryRel) 
{ + GeoqueryRel gqr = (GeoqueryRel) fieldValue; + logger.trace("Georel value " + gqr.getGeorelOp()); + try { + sqlWhereProperty = translateNgsildGeoqueryToPostgisQuery(gqr, qp.getGeometry(), + qp.getCoordinates(), qp.getGeoproperty()); + } catch (ResponseException e) { + e.printStackTrace(); + } + } + break; + case NGSIConstants.QUERY_PARAMETER_QUERY: + sqlWhereProperty = queryValue; + break; + } + fullSqlWhereProperty.append(sqlWhereProperty); + if (!sqlWhereProperty.isEmpty()) + fullSqlWhereProperty.append(" AND "); + } + }); + + String tableDataColumn; + if (qp.getKeyValues()) { + if (qp.getIncludeSysAttrs()) { + tableDataColumn = DBConstants.DBCOLUMN_KVDATA; + } else { // without sysattrs at root level (entity createdat/modifiedat) + tableDataColumn = DBConstants.DBCOLUMN_KVDATA + " - '" + NGSIConstants.NGSI_LD_CREATED_AT + "' - '" + + NGSIConstants.NGSI_LD_MODIFIED_AT + "'"; + } + } else { + if (qp.getIncludeSysAttrs()) { + tableDataColumn = DBConstants.DBCOLUMN_DATA; + } else { + tableDataColumn = DBConstants.DBCOLUMN_DATA_WITHOUT_SYSATTRS; // default request + } + } + + String dataColumn = tableDataColumn; + if (qp.getAttrs() != null) { + String expandedAttributeList = "'" + NGSIConstants.JSON_LD_ID + "','" + NGSIConstants.JSON_LD_TYPE + "','" + + qp.getAttrs().replace(",", "','") + "'"; + if (qp.getIncludeSysAttrs()) { + expandedAttributeList += "," + NGSIConstants.NGSI_LD_CREATED_AT + "," + + NGSIConstants.NGSI_LD_MODIFIED_AT; + } + dataColumn = "(SELECT jsonb_object_agg(key, value) FROM jsonb_each(" + tableDataColumn + ") WHERE key IN ( " + + expandedAttributeList + "))"; + } + String sqlQuery = "SELECT " + dataColumn + " as data FROM " + DBConstants.DBTABLE_ENTITY + " "; + if (fullSqlWhereProperty.length() > 0) { + sqlQuery += "WHERE " + fullSqlWhereProperty.toString() + " 1=1 "; + } + int limit = qp.getLimit(); + int offSet = qp.getOffSet(); + + if(limit != -1) { + sqlQuery += "LIMIT " + limit + " "; + } + if(offSet != -1) { + sqlQuery += "OFFSET " + offSet + " "; + } + // order by ? + + return sqlQuery; + } + + // TODO: SQL input sanitization + // TODO: property of property + // [SPEC] spec is not clear on how to define a "property of property" in + // the geoproperty field. 
(probably using dots) + protected String translateNgsildGeoqueryToPostgisQuery(GeoqueryRel georel, String geometry, String coordinates, + String geoproperty, String dbColumn) throws ResponseException { + StringBuilder sqlWhere = new StringBuilder(50); + + String georelOp = georel.getGeorelOp(); + logger.trace(" Geoquery term georelOp: " + georelOp); + + if (dbColumn == null) { + dbColumn = DBConstants.NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO.get(geoproperty); + if (dbColumn == null) { + sqlWhere.append("data @> '{\"" + geoproperty + "\": [{\"" + NGSIConstants.JSON_LD_TYPE + "\":[\"" + + NGSIConstants.NGSI_LD_GEOPROPERTY + "\"]}]}' AND "); + dbColumn = "ST_SetSRID(ST_GeomFromGeoJSON( " + "data#>>'{" + geoproperty + ",0," + + NGSIConstants.NGSI_LD_HAS_VALUE + ",0," + NGSIConstants.JSON_LD_VALUE + "}'), 4326)"; + } + } + + String referenceValue = "ST_SetSRID(ST_GeomFromGeoJSON('{\"type\": \"" + geometry + "\", \"coordinates\": " + + coordinates + " }'), 4326)"; + String sqlPostgisFunction = DBConstants.NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING.get(georelOp); + + switch (georelOp) { + case NGSIConstants.GEO_REL_NEAR: + if (georel.getDistanceType() != null && georel.getDistanceValue() != null) { + if (georel.getDistanceType().equals(NGSIConstants.GEO_REL_MIN_DISTANCE)) + sqlWhere.append("NOT "); + sqlWhere.append(sqlPostgisFunction + "( " + dbColumn + "::geography, " + referenceValue + + "::geography, " + georel.getDistanceValue() + ") "); + } else { + throw new ResponseException(ErrorType.BadRequestData, + "GeoQuery: Type and distance are required for near relation"); + } + break; + case NGSIConstants.GEO_REL_WITHIN: + case NGSIConstants.GEO_REL_CONTAINS: + case NGSIConstants.GEO_REL_OVERLAPS: + case NGSIConstants.GEO_REL_INTERSECTS: + case NGSIConstants.GEO_REL_EQUALS: + case NGSIConstants.GEO_REL_DISJOINT: + sqlWhere.append(sqlPostgisFunction + "( " + dbColumn + ", " + referenceValue + ") "); + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Invalid georel operator: " + georelOp); + } + return sqlWhere.toString(); + } + + protected String translateNgsildGeoqueryToPostgisQuery(GeoqueryRel georel, String geometry, String coordinates, + String geoproperty) throws ResponseException { + return this.translateNgsildGeoqueryToPostgisQuery(georel, geometry, coordinates, geoproperty, null); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/interfaces/IProducerChannels.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/interfaces/IProducerChannels.java new file mode 100644 index 0000000000000000000000000000000000000000..82e26fef1d2cba78264f06a737efca4918b23f56 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/interfaces/IProducerChannels.java @@ -0,0 +1,15 @@ +package eu.neclab.ngsildbroker.commons.stream.interfaces; + +import org.springframework.cloud.stream.annotation.Output; +import org.springframework.messaging.MessageChannel; + +public interface IProducerChannels { + // public String entity="ENTITY"; +// String writeChannel = ""; +// +// // @Output(entity) +// // MessageChannel entity(); +// +// @Output(writeChannel) +// MessageChannel entityCreate(); +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/CommonKafkaConfig.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/CommonKafkaConfig.java new file mode 100644 index 
0000000000000000000000000000000000000000..060cd0186aa8a7c80845592fe08c756c2f83e619 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/CommonKafkaConfig.java @@ -0,0 +1,82 @@ +package eu.neclab.ngsildbroker.commons.stream.service; + +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.ByteArrayDeserializer; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.kafka.core.ConsumerFactory; +import org.springframework.kafka.core.DefaultKafkaConsumerFactory; +import org.springframework.kafka.core.DefaultKafkaProducerFactory; +import org.springframework.kafka.core.ProducerFactory; + +@Configuration +public class CommonKafkaConfig { + + @Value("${bootstrap.servers}") + String BOOTSTRAP_SERVERS; + + + @Autowired + KafkaProperties kafkaProperties; + + + + + @Bean + public Map producerConfigs() { + Map props = new HashMap<>(kafkaProperties.buildProducerProperties()); + props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); + props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); + // list of host:port pairs used for establishing the initial connections to the + // Kakfa cluster + props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS); + props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, 104857600); + return props; + } + + @Bean + public Map consumerConfigs() { + Map props = new HashMap<>(kafkaProperties.buildConsumerProperties()); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS); + props.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString()); + props.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, 104857600); + return props; + } + + @Bean + public ProducerFactory producerFactory() { + return new DefaultKafkaProducerFactory<>(producerConfigs()); + } + + @Bean + public ProducerFactory producerObjFactory() { + return new DefaultKafkaProducerFactory<>(producerConfigs()); + } + + @Bean + public ConsumerFactory consumerFactory() { + return new DefaultKafkaConsumerFactory<>(consumerConfigs()); + } + + @Bean + public ConsumerFactory consumerObjFactory() { + return new DefaultKafkaConsumerFactory<>(consumerConfigs()); + } + + + + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/KafkaConfig.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/KafkaConfig.java new file mode 100644 index 0000000000000000000000000000000000000000..15885381b3b2ad6c5e5673bec4be1d9af8642478 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/KafkaConfig.java @@ -0,0 +1,98 @@ +package 
eu.neclab.ngsildbroker.commons.stream.service; + +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.ByteArrayDeserializer; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory; +import org.springframework.kafka.config.KafkaListenerContainerFactory; +import org.springframework.kafka.core.ConsumerFactory; +import org.springframework.kafka.core.DefaultKafkaConsumerFactory; +import org.springframework.kafka.core.DefaultKafkaProducerFactory; +import org.springframework.kafka.core.KafkaTemplate; +import org.springframework.kafka.core.ProducerFactory; +import org.springframework.kafka.listener.ConcurrentMessageListenerContainer; +import org.springframework.kafka.listener.ContainerProperties; +import org.springframework.kafka.listener.KafkaMessageListenerContainer; +import org.springframework.kafka.requestreply.ReplyingKafkaTemplate; + +@Configuration +public class KafkaConfig extends CommonKafkaConfig{ + + + @Value("${query.result.topic}") + String queryResultTopic; + + + + @Bean + public KafkaTemplate kafkaTemplate() { + return new KafkaTemplate<>(producerFactory()); + } + + @Bean + public ReplyingKafkaTemplate replyKafkaTemplate(ProducerFactory pf, + KafkaMessageListenerContainer container) { + return new ReplyingKafkaTemplate<>(pf, container); + + } + + @Bean + public KafkaMessageListenerContainer replyContainer(ConsumerFactory cf) { + ContainerProperties containerProperties = new ContainerProperties(queryResultTopic); + return new KafkaMessageListenerContainer<>(cf, containerProperties); + } + + @Bean + public KafkaListenerContainerFactory> kafkaListenerContainerFactory() { + ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); + factory.setConsumerFactory(consumerFactory()); + factory.setReplyTemplate(kafkaTemplate()); + return factory; + } + + // region Consumer config/factory and kafkalistener factory to use manual offset + // commit (acknowledge method) + + /* + * https://stackoverflow.com/questions/47427948/how-to-acknowledge-current- + * offset-in-spring-kafka-for-manual-commit + * https://github.com/contactsunny/spring-kafka-test + */ + + public Map consumerConfigsManualOffsetCommit() { + Map props = this.consumerConfigs(); + props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); + return props; + } + + public ConsumerFactory consumerFactoryManualOffsetCommit() { + return new DefaultKafkaConsumerFactory<>(consumerConfigsManualOffsetCommit(), new StringDeserializer(), + new ByteArrayDeserializer()); + } + + @Bean + public KafkaListenerContainerFactory> kafkaListenerContainerFactoryManualOffsetCommit() { + ConcurrentKafkaListenerContainerFactory factory = new ConcurrentKafkaListenerContainerFactory<>(); + factory.setConsumerFactory(consumerFactoryManualOffsetCommit()); + factory.setReplyTemplate(kafkaTemplate()); + 
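		// Editorial note (not part of the original diff): the two settings that follow switch this
		// container factory to manual, immediate offset acknowledgement with synchronous commits,
		// so a listener wired to it has to acknowledge every record itself. Illustrative sketch only;
		// the listener below is an assumption, not code from this repository:
		//   @KafkaListener(topics = "ENTITY", containerFactory = "kafkaListenerContainerFactoryManualOffsetCommit")
		//   public void listen(ConsumerRecord<String, byte[]> record, Acknowledgment ack) {
		//       // process record ...
		//       ack.acknowledge(); // commits this offset immediately and synchronously
		//   }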
factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL_IMMEDIATE); + factory.getContainerProperties().setSyncCommits(true); + + return factory; + } + + // endregion + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/KafkaOps.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/KafkaOps.java new file mode 100644 index 0000000000000000000000000000000000000000..decb39304c4194b05a3233eebe844a3d470b00ad --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/stream/service/KafkaOps.java @@ -0,0 +1,329 @@ +package eu.neclab.ngsildbroker.commons.stream.service; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.TimeUnit; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.DeleteTopicsResult; +import org.apache.kafka.clients.admin.ListTopicsResult; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.common.KafkaFuture; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.serialization.ByteArrayDeserializer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; +import org.springframework.cloud.stream.binder.ExtendedProducerProperties; +import org.springframework.cloud.stream.binder.kafka.properties.KafkaBinderConfigurationProperties; +import org.springframework.cloud.stream.binder.kafka.properties.KafkaProducerProperties; +import org.springframework.cloud.stream.binder.kafka.provisioning.KafkaTopicProvisioner; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.kafka.support.serializer.JsonDeserializer; +import org.springframework.messaging.Message; +import org.springframework.messaging.MessageChannel; +import org.springframework.messaging.MessageHeaders; +import org.springframework.messaging.support.MessageBuilder; +import org.springframework.retry.RetryOperations; +import org.springframework.retry.support.RetryTemplate; +import org.springframework.stereotype.Component; +import org.springframework.util.MimeTypeUtils; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.neclab.ngsildbroker.commons.constants.KafkaConstants; +import eu.neclab.ngsildbroker.commons.datatypes.EntityDetails; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +@Component +public class KafkaOps { + + @Value("${bootstrap.servers}") + String BOOTSTRAP_SERVERS; + + AdminClient adminClient = null; + + @Autowired + ObjectMapper objectMapper; + + // Duration brokerPollDurationMillis = Duration.ofMillis(200); + // Duration brokerHeartbeatPollDurationinMillis=Duration.ofMillis(0); + long brokerPollDurationMillis = 200; + long brokerHeartbeatPollDurationinMillis = 0; + + // private final static Logger logger = LoggerFactory.getLogger(KafkaOps.class); + + public boolean pushToKafka(MessageChannel 
messageChannel, byte[] key, byte[] payload) throws ResponseException { + try { + boolean result = messageChannel + .send(MessageBuilder.withPayload(payload).setHeader(KafkaHeaders.MESSAGE_KEY, key) + .setHeader(MessageHeaders.CONTENT_TYPE, MimeTypeUtils.APPLICATION_JSON).build()); + return result; + } catch (Exception e) { + throw new ResponseException(ErrorType.KafkaWriteError, e.getMessage()); + } + } + + public void createTopic(String topicName) { + + KafkaProperties kafkaProperties = new KafkaProperties(); + kafkaProperties.setBootstrapServers(Collections.singletonList("localhost:9092")); + KafkaBinderConfigurationProperties kafkaBinderConfigurationProperties = new KafkaBinderConfigurationProperties( + kafkaProperties); + KafkaTopicProvisioner kafkaTopicProvisioner = new KafkaTopicProvisioner(kafkaBinderConfigurationProperties, + kafkaProperties); + RetryOperations metadataRetryOperations = new RetryTemplate(); + kafkaTopicProvisioner.setMetadataRetryOperations(metadataRetryOperations); + KafkaProducerProperties kafkaProducerProperties = new KafkaProducerProperties(); + ExtendedProducerProperties extendedProducerProperties = new ExtendedProducerProperties( + kafkaProducerProperties); + kafkaTopicProvisioner.provisionProducerDestination(topicName, extendedProducerProperties); + } + + public Set getTopics() throws Exception { + KafkaProperties kafkaProperties = new KafkaProperties(); + kafkaProperties.setBootstrapServers(Collections.singletonList("localhost:9092")); + Map adminClientProperties = kafkaProperties.buildAdminProperties(); + try (AdminClient adminClient = AdminClient.create(adminClientProperties)) { + ListTopicsResult listTopicsResult = adminClient.listTopics(); + KafkaFuture> namesFutures = listTopicsResult.names(); + Set names = namesFutures.get(30, TimeUnit.SECONDS); + return names; + } + } + + public void deleteTopic(Collection topicName) throws Exception { + KafkaProperties kafkaProperties = new KafkaProperties(); + kafkaProperties.setBootstrapServers(Collections.singletonList("localhost:9092")); + Map adminClientProperties = kafkaProperties.buildAdminProperties(); + try (AdminClient adminClient = AdminClient.create(adminClientProperties)) { + // TODO what's up with this result?? + DeleteTopicsResult deleteTopicResult = adminClient.deleteTopics(topicName); + } + } + + public boolean isMessageExists(String key, String topicname) { + Map entityMap = pullFromKafka(topicname); + return entityMap.containsKey(key); + } + + public String generateUUIDKey() { + return UUID.randomUUID().toString(); + } + + @SuppressWarnings("deprecation") + // TODO replace poll method... needs subscription listener when replaced + public Map pullFromKafka(String topicname) { + KafkaConsumer consumer = new KafkaConsumer(this.getProperties()); + try { + Map entityMap = new HashMap(2000); + boolean stop = false; + consumer.subscribe(new ArrayList(Collections.singletonList(topicname))); + consumer.poll(brokerHeartbeatPollDurationinMillis); + // Reading topic offset from beginning + consumer.seekToBeginning(consumer.assignment()); + while (!stop) { + // Request unread messages from the topic. 
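				// Editorial note: this loop replays the topic from the beginning (random group id plus
				// "earliest" offset reset in getProperties()) and stops as soon as a poll returns an empty
				// batch, so entityMap ends up holding the latest payload seen for every key.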
+ ConsumerRecords consumerRecords = consumer.poll(brokerPollDurationMillis); + Iterator> iterator = consumerRecords.iterator(); + if (iterator.hasNext()) { + while (iterator.hasNext()) { + ConsumerRecord record = iterator.next(); + entityMap.put(new String(record.key()), record.value()); + } + } else { + stop = true; + } + } + return entityMap; + } finally { + consumer.unsubscribe(); + consumer.close(); + } + } + + @SuppressWarnings("deprecation") + // TODO replace poll method... needs subscription listener when replaced + public Map getAllEntitiesDetails() throws IOException { + KafkaConsumer consumer = new KafkaConsumer(this.getProperties()); + try { + Map entityMap = new HashMap(2000); + boolean stop = false; + consumer.subscribe(new ArrayList(Collections.singletonList(KafkaConstants.ENTITY_TOPIC))); + consumer.poll(brokerHeartbeatPollDurationinMillis); + // Reading topic offset from beginning + consumer.seekToBeginning(consumer.assignment()); + while (!stop) { + // Request unread messages from the topic. + ConsumerRecords consumerRecords = consumer.poll(brokerPollDurationMillis); + Iterator> iterator = consumerRecords.iterator(); + if (iterator.hasNext()) { + while (iterator.hasNext()) { + ConsumerRecord record = iterator.next(); + JsonNode entityJsonBody = objectMapper.readTree(record.value()); + boolean isDeletedMsg = entityJsonBody.isNull(); + if (isDeletedMsg) { + entityMap.remove(new String(record.key())); + } else { + entityMap.put(new String(record.key()), + new EntityDetails(new String(record.key()), record.partition(), record.offset())); + } + } + } else { + stop = true; + } + } + return entityMap; + } finally { + consumer.unsubscribe(); + consumer.close(); + } + } + + /** + * Method used for read message from topic + * + * @param key + * @param topicname + * @return byte[] + */ + public byte[] getMessage(String key, String topicname) { + Map entityMap = pullFromKafka(topicname, key); + return entityMap.get(key); + } + + @SuppressWarnings("deprecation") + // TODO replace poll method... needs subscription listener when replaced + public Map pullFromKafka(String topicname, String key) { + KafkaConsumer consumer = new KafkaConsumer(this.getProperties()); + try { + Map entityMap = new HashMap(2000); + boolean stop = false; + consumer.subscribe(new ArrayList(Collections.singletonList(topicname))); + consumer.poll(brokerHeartbeatPollDurationinMillis); + // Reading topic offset from beginning + consumer.seekToBeginning(consumer.assignment()); + + while (!stop) { + // Request unread messages from the topic. 
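				// Editorial note: unlike the variant above, this loop still scans the whole topic but only
				// keeps records whose key equals the requested key, so looking up a single entry pays the
				// cost of a full topic replay.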
+ ConsumerRecords consumerRecords = consumer.poll(brokerPollDurationMillis); + Iterator> iterator = consumerRecords.iterator(); + if (iterator.hasNext()) { + while (iterator.hasNext()) { + ConsumerRecord record = iterator.next(); + if (key.equals(new String(record.key()))) { + entityMap.put(new String(record.key()), record.value()); + } + } + } else { + stop = true; + } + } + return entityMap; + } finally { + consumer.unsubscribe(); + consumer.close(); + } + } + + /** + * Method used for set properties + * + * @return + */ + public Map getProperties() { + Map props = new HashMap<>(); + props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS); + props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); + props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class); + props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");// seek to begining + props.put(ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString()); + props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, Integer.MAX_VALUE); + props.put(ConsumerConfig.FETCH_MAX_BYTES_CONFIG, Integer.MAX_VALUE); + props.put(JsonDeserializer.TRUSTED_PACKAGES, "*"); + return props; + } + + @SuppressWarnings("deprecation") + // TODO replace poll method... needs subscription listener when replaced + public byte[] getMessage(String topicname, String key, int partition, long offset) { + Map entityMap = new HashMap(); + Map props = getProperties(); + props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1); + props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, Integer.MAX_VALUE); + props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false); + props.remove(ConsumerConfig.FETCH_MAX_BYTES_CONFIG); + KafkaConsumer consumer = new KafkaConsumer(props); + try { + consumer.subscribe(new ArrayList(Collections.singletonList(topicname))); + consumer.poll(brokerHeartbeatPollDurationinMillis); + consumer.seek(new TopicPartition(topicname, partition), offset); + boolean flag = true; + int retry = 0; + while (flag && retry < 3) { + ConsumerRecords consumerRecords = consumer.poll(brokerPollDurationMillis); + consumerRecords.forEach(record -> { + entityMap.put(new String(record.key()), record.value()); + }); + if (entityMap.containsKey(key)) { + break; + } else { + System.out.println( + "not found in try ::" + retry + " for partition ::" + partition + " & offset ::" + offset); + retry++; + } + } + return entityMap.get(key); + } finally { + consumer.unsubscribe(); + consumer.close(); + } + } + + @SuppressWarnings("deprecation") + // TODO replace poll method... needs subscription listener when replaced + public byte[] getMessageDetails(String topicName, String key) { + KafkaConsumer consumer = new KafkaConsumer(this.getProperties()); + try { + boolean stop = false; + Map entityMap = new HashMap(2000); + consumer.subscribe(new ArrayList(Collections.singletonList(topicName))); + consumer.poll(brokerHeartbeatPollDurationinMillis); + // Reading topic offset from beginning + consumer.seekToBeginning(consumer.assignment()); + while (!stop) { + // Request unread messages from the topic. 
+ ConsumerRecords consumerRecords = consumer.poll(brokerPollDurationMillis); + Iterator> iterator = consumerRecords.iterator(); + if (iterator.hasNext()) { + while (iterator.hasNext()) { + ConsumerRecord record = iterator.next(); + if (key.equals(new String(record.key()))) { + entityMap.put(key, record.value()); + } + } + } else { + stop = true; + } + } + return entityMap.get(key); + } finally { + consumer.unsubscribe(); + consumer.close(); + } + } + + public static String getMessageKey(Message message) { + Object key = message.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY); + return (String) key; + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/swaggerConfig/SwaggerConfigDetails.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/swaggerConfig/SwaggerConfigDetails.java new file mode 100644 index 0000000000000000000000000000000000000000..febb77ecd425f96ce13dab8bd241fd30912ea61f --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/swaggerConfig/SwaggerConfigDetails.java @@ -0,0 +1,46 @@ +package eu.neclab.ngsildbroker.commons.swaggerConfig; + +import java.util.Collections; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import springfox.documentation.builders.PathSelectors; +import springfox.documentation.builders.RequestHandlerSelectors; +import springfox.documentation.service.ApiInfo; +import springfox.documentation.service.Contact; +import springfox.documentation.spi.DocumentationType; +import springfox.documentation.spring.web.plugins.Docket; +import springfox.documentation.swagger2.annotations.EnableSwagger2; + + + +@Configuration +@EnableSwagger2 +public class SwaggerConfigDetails { + + @Bean + public Docket api() { + return new Docket(DocumentationType.SWAGGER_2) + .select() + .apis(RequestHandlerSelectors.basePackage("eu.neclab.ngsildbroker")) + .paths(PathSelectors.any()) + .build() + .apiInfo(getApiInformation() ); + } + + private ApiInfo getApiInformation(){ + return new ApiInfo("Scorpio Broker APIs", + "Description of CRUD operations", + "1.0", + "API Terms of Service URL", + new Contact("GitHub", AppConstants.SWAGGER_WEBSITE_LINK, AppConstants.SWAGGER_CONTACT_LINK), + "API License", + "API License URL", + Collections.emptyList() + ); + } +} + diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/EntityTools.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/EntityTools.java new file mode 100644 index 0000000000000000000000000000000000000000..92fa6471ba2b14e8acf81286418f0f7f1b6b6216 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/EntityTools.java @@ -0,0 +1,85 @@ +package eu.neclab.ngsildbroker.commons.tools; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Date; +import java.util.List; +import java.util.UUID; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceNotification; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.enums.TriggerReason; + +public abstract class EntityTools { + + private static final String BROKER_PREFIX = 
"ngsildbroker:"; + + public static URI getRandomID(String prefix) throws URISyntaxException { + if (prefix == null) { + prefix = ":"; + } + if (!prefix.endsWith(":")) { + prefix += ":"; + } + URI result; + + result = new URI(BROKER_PREFIX + prefix + UUID.randomUUID().getLeastSignificantBits()); + return result; + + } + + public static List squashCSourceNotifications(List data) { + List newData = new ArrayList(); + List updatedData = new ArrayList(); + List deletedData = new ArrayList(); + List result = new ArrayList(); + for (CSourceNotification notification : data) { + switch (notification.getTriggerReason()) { + case newlyMatching: + newData.addAll(notification.getData()); + break; + case updated: + updatedData.addAll(notification.getData()); + break; + case noLongerMatching: + deletedData.addAll(notification.getData()); + break; + default: + break; + + } + + } + long now = System.currentTimeMillis(); + try { + if(!newData.isEmpty()) { + result.add(new CSourceNotification(getRandomID("csource"),data.get(0).getSubscriptionId(),new Date(now),TriggerReason.newlyMatching,newData, data.get(0).getErrorMsg(),data.get(0).getErrorType(),data.get(0).getShortErrorMsg(),data.get(0).isSuccess())); + } + if(!updatedData.isEmpty()) { + result.add(new CSourceNotification(getRandomID("csource"),data.get(0).getSubscriptionId(),new Date(now),TriggerReason.updated,updatedData, data.get(0).getErrorMsg(),data.get(0).getErrorType(),data.get(0).getShortErrorMsg(),data.get(0).isSuccess())); + } + if(!deletedData.isEmpty()) { + result.add(new CSourceNotification(getRandomID("csource"),data.get(0).getSubscriptionId(),new Date(now),TriggerReason.noLongerMatching,deletedData, data.get(0).getErrorMsg(),data.get(0).getErrorType(),data.get(0).getShortErrorMsg(),data.get(0).isSuccess())); + } + } catch (URISyntaxException e) { + //left empty intentionally should never happen + throw new AssertionError(); + } + + return result; + } + + public static Notification squashNotifications(List data) { + List newData = new ArrayList(); + for (Notification notification : data) { + newData.addAll(notification.getData()); + } + return new Notification(data.get(0).getId(), System.currentTimeMillis(), + data.get(0).getSubscriptionId(), newData, data.get(0).getErrorMsg(), data.get(0).getErrorType(), + data.get(0).getShortErrorMsg(), data.get(0).isSuccess()); + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/ErrorAwareResponseHandler.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/ErrorAwareResponseHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..8da7233ad098797fee22da7d57fb06f521fd6dda --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/ErrorAwareResponseHandler.java @@ -0,0 +1,31 @@ +package eu.neclab.ngsildbroker.commons.tools; + +import java.io.IOException; + +import org.apache.http.HttpEntity; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.impl.client.BasicResponseHandler; +import org.apache.http.util.EntityUtils; + +import eu.neclab.ngsildbroker.commons.exceptions.HttpErrorResponseException; + + +public class ErrorAwareResponseHandler extends BasicResponseHandler { + + private static final int MIN_NON_SUCCESSFUL_STATUS = 300; + + @Override + public String handleResponse(final HttpResponse response) + throws IOException { + StatusLine statusLine = response.getStatusLine(); + HttpEntity entity = response.getEntity(); + String body 
= entity == null ? null : EntityUtils.toString(entity); + EntityUtils.consume(entity); + if (statusLine.getStatusCode() >= MIN_NON_SUCCESSFUL_STATUS) { + throw new HttpErrorResponseException(statusLine.getStatusCode(), + statusLine.getReasonPhrase()); + } + return body; + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/HTTPMethod.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/HTTPMethod.java new file mode 100644 index 0000000000000000000000000000000000000000..47bc8b3d2cb02ce4c42d9c8fcea157e10110c837 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/HTTPMethod.java @@ -0,0 +1,32 @@ +package eu.neclab.ngsildbroker.commons.tools; + +public enum HTTPMethod { + /** GET method. */ + GET, + /** POST method. */ + POST, + /** PUT method. */ + PUT, + /** DELETE method. */ + DELETE, + /** HEAD method. */ + HEAD, + /** OPTIONS method. */ + OPTIONS; + + /** + * Get the method out of its name. + * + * @param methodString + * the name of the method + * @return an instance of this enum representing the method + */ + public static HTTPMethod getMethod(String methodString) { + for (HTTPMethod method : HTTPMethod.values()) { + if (method.name().equals(methodString)) { + return method; + } + } + throw new AssertionError("Unknown method"); + } +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/HttpUtils.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/HttpUtils.java new file mode 100644 index 0000000000000000000000000000000000000000..1311091cd1456c8f90a5ace570376a2cb44ef790 --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/HttpUtils.java @@ -0,0 +1,1068 @@ +package eu.neclab.ngsildbroker.commons.tools; + +import java.io.BufferedReader; +import java.io.ByteArrayOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URL; +import java.net.URLDecoder; +import java.security.KeyManagementException; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.ZipEntry; +import java.util.zip.ZipOutputStream; + +import javax.net.ssl.SSLContext; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.apache.http.HttpEntityEnclosingRequest; +import org.apache.http.HttpHost; +import org.apache.http.HttpRequest; +import org.apache.http.HttpResponse; +import org.apache.http.StatusLine; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.ResponseHandler; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.client.methods.HttpRequestBase; 
+import org.apache.http.config.RegistryBuilder; +import org.apache.http.conn.params.ConnRouteParams; +import org.apache.http.conn.socket.ConnectionSocketFactory; +import org.apache.http.conn.socket.PlainConnectionSocketFactory; +import org.apache.http.conn.ssl.AllowAllHostnameVerifier; +import org.apache.http.conn.ssl.NoopHostnameVerifier; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.FileEntity; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.DefaultHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; +import org.apache.http.params.HttpParams; +import org.apache.http.ssl.SSLContextBuilder; +import org.apache.http.util.EntityUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.http.ResponseEntity.BodyBuilder; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.HttpErrorResponseException; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.CompactedJson; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; + +/** + * A utility class to handle HTTP Requests and Responses. + * + * @author the scorpio team + * + */ +@SuppressWarnings("deprecation") +public final class HttpUtils { + + /** Timeout for all requests to respond. */ + private static final Integer REQ_TIMEOUT_MS = 10000; + + private static HttpUtils SINGLETON; + + private static final int BUFFER_SIZE = 1024; + + private static final Logger LOG = LoggerFactory.getLogger(HttpUtils.class); + + private static final int DEFAULT_PROXY_PORT = 8080; + + private HttpHost httpProxy = null; + + private ContextResolverBasic contextResolver; + private Pattern headerPattern = Pattern.compile( + "((\\*\\/\\*)|(application\\/\\*)|(application\\/json)|(application\\/ld\\+json)|(application\\/n-quads))(\\s*\\;\\s*q=(\\d(\\.\\d)*))?\\s*\\,?\\s*"); + + private HttpUtils(ContextResolverBasic contextResolver) { + this.contextResolver = contextResolver; + // Nothing to do, but make sure not more than one instance is created. + } + + //Dummy instance with out context resolving. only used for gets and posts etc. + private static HttpUtils NULL_INSTANCE = new HttpUtils(null); + /** + * Returns the singleton instance of this class. 
+ * + * @return an HttpUtils instance + */ + public static HttpUtils getInstance(ContextResolverBasic contextResolver) { + if(contextResolver == null) { + return NULL_INSTANCE; + } + if (SINGLETON == null) { + SINGLETON = new HttpUtils(contextResolver); + } + return SINGLETON; + } + + public static void doPreflightCheck(HttpServletRequest req, String payload) throws ResponseException { + String contentType = req.getHeader(HttpHeaders.CONTENT_TYPE); + if (contentType == null) { + throw new ResponseException(ErrorType.UnsupportedMediaType, "No content type header provided"); + } + if (!contentType.toLowerCase().contains("application/json") + && !contentType.toLowerCase().contains("application/ld+json")) { + throw new ResponseException(ErrorType.UnsupportedMediaType, + "Unsupported content type. Allowed are application/json and application/ld+json. You provided " + + contentType); + } + if (payload == null || payload.trim().isEmpty() || payload.trim().equals("{}") || payload.trim().equals("[]")) { + throw new ResponseException(ErrorType.BadRequestData, "empty payloads are not allowed in this operation"); + } + + if (contentType.toLowerCase().contains("application/json") && payload.contains("@context")) { + throw new ResponseException(ErrorType.BadRequestData, + "data of the content type application/json cannot provide an @context entry in the body"); + } + + } + + public String expandPayload(HttpServletRequest request, String payload, int endPoint) + throws ResponseException, MalformedURLException, UnsupportedEncodingException { + + String ldResolved = null; + + final String contentType = request.getContentType(); + final List linkHeaders = HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT); + + // PayloadValidationRule rule = new PayloadValidationRule(); + // rule.validateEntity(payload, request); + + if (contentType.equalsIgnoreCase(AppConstants.NGB_APPLICATION_JSON)) { + + ldResolved = contextResolver.expand(payload, linkHeaders, true, endPoint); + + } else if (contentType.equalsIgnoreCase(AppConstants.NGB_APPLICATION_JSONLD)) { + if (!payload.contains("@context")) { + throw new ResponseException(ErrorType.BadRequestData, + "You have to provide an @context entry in the body with Content-Type: " + + AppConstants.NGB_APPLICATION_JSONLD); + } + + ldResolved = contextResolver.expand(payload, null, true, endPoint); + + } else { + throw new ResponseException(ErrorType.BadRequestData, + "Missing or unknown Content-Type header. Content-Type header is mandatory. Only application/json or application/ld+json mime type is allowed"); + } + + return ldResolved; + } + + /** + * Set the HTTP Proxy that will be used for all future requests. 
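	 * (Editorial note, not part of the original code: the proxy is stored on the shared singleton,
	 * so getInstance(...) needs to have been called with a non-null resolver before this method is
	 * used; otherwise SINGLETON is still null. Illustrative call, host and port are assumptions:
	 * HttpUtils.setHttpProxy(new URL("http://proxy.example.org:8080"));)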
+ * + * @param httpProxy a URL with the HTTP proxy + */ + public static void setHttpProxy(URL httpProxy) { + if (httpProxy != null) { + int port = httpProxy.getPort(); + if (port == -1) { + port = DEFAULT_PROXY_PORT; + } + SINGLETON.httpProxy = new HttpHost(httpProxy.getHost(), port, httpProxy.getProtocol()); + } else { + SINGLETON.httpProxy = null; + } + } + + private String doHTTPRequest(URI uri, HTTPMethod method, Object body, Map additionalHeaders, + AuthScope authScope, UsernamePasswordCredentials credentials) throws IOException { + ErrorAwareResponseHandler handler = new ErrorAwareResponseHandler(); + return doHTTPRequest2(uri, method, body, additionalHeaders, authScope, credentials, handler); + } + + private String doHTTPRequest(URI uri, HTTPMethod method, Object body, Map additionalHeaders, + AuthScope authScope, UsernamePasswordCredentials credentials, ResponseHandler handler) + throws IOException { + DefaultHttpClient httpClient = new DefaultHttpClient(); + HttpParams params = httpClient.getParams(); + params.setParameter("http.socket.timeout", REQ_TIMEOUT_MS); + params.setParameter("http.connection.timeout", REQ_TIMEOUT_MS); + params.setParameter("http.connection-manager.timeout", REQ_TIMEOUT_MS.longValue()); + params.setParameter("http.protocol.head-body-timeout", REQ_TIMEOUT_MS); + + if (httpProxy != null) { + params.setParameter(ConnRouteParams.DEFAULT_PROXY, httpProxy); + } + if (credentials != null) { + httpClient.getCredentialsProvider().setCredentials(authScope, credentials); + } + HttpRequestBase request; + + switch (method) { + case GET: + request = new HttpGet(uri); + break; + case POST: + HttpPost postRequest = new HttpPost(uri); + if (body instanceof String) { + addBody((String) body, postRequest); + } else if (body instanceof File) { + addBody((File) body, postRequest); + } else if (body instanceof byte[]) { + addBody((byte[]) body, postRequest); + } + + request = postRequest; + break; + case PUT: + HttpPut putRequest = new HttpPut(uri); + if (body instanceof String) { + addBody((String) body, putRequest); + } else if (body instanceof File) { + addBody((File) body, putRequest); + } + + request = putRequest; + break; + case DELETE: + request = new HttpDelete(uri); + break; + case HEAD: + request = new HttpHead(uri); + break; + default: + httpClient.close(); + throw new AssertionError("Unknown method: " + method); + } + if (additionalHeaders != null && !additionalHeaders.isEmpty()) { + addHeaders(additionalHeaders, request); + } + try { + return httpClient.execute(request, handler); + } finally { + httpClient.getConnectionManager().shutdown(); + httpClient.close(); + + } + } + + /** + * Perform an HTTP request using a disposable HTTP client. + * + * @param httpClient the HTTP client + * @return the body of the message + * @throws IOException if an error occurs during the request + * @throws HttpErrorResponseException if a non 2xx response code is returned + * (this is an unchecked exception!) 
+ */ + + private String doHTTPRequest2(URI uri, HTTPMethod method, Object body, Map additionalHeaders, + AuthScope authScope, UsernamePasswordCredentials credentials, ResponseHandler handler) + throws IOException { + + CloseableHttpClient httpClient; + try { + httpClient = getClient(authScope, credentials); + } catch (Exception e) { + e.printStackTrace(); + return null; + } + HttpRequestBase request; + switch (method) { + case GET: + request = new HttpGet(uri); + break; + case POST: + HttpPost postRequest = new HttpPost(uri); + if (body instanceof String) { + addBody((String) body, postRequest); + } else if (body instanceof File) { + addBody((File) body, postRequest); + } else if (body instanceof byte[]) { + addBody((byte[]) body, postRequest); + } + + request = postRequest; + break; + case PUT: + HttpPut putRequest = new HttpPut(uri); + if (body instanceof String) { + addBody((String) body, putRequest); + } else if (body instanceof File) { + addBody((File) body, putRequest); + } + + request = putRequest; + break; + case DELETE: + request = new HttpDelete(uri); + break; + case HEAD: + request = new HttpHead(uri); + break; + default: + httpClient.close(); + throw new AssertionError("Unknown method: " + method); + } + if (additionalHeaders != null && !additionalHeaders.isEmpty()) { + addHeaders(additionalHeaders, request); + } + try { + return httpClient.execute(request, handler); + } finally { + httpClient.close(); + + } + } + + private CloseableHttpClient getClient(AuthScope authScope, UsernamePasswordCredentials credentials) + throws KeyManagementException, NoSuchAlgorithmException, KeyStoreException { + SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(null, (x509CertChain, authType) -> true) + .build(); + HttpClientBuilder temp = HttpClientBuilder.create().setSSLContext(sslContext) + .setConnectionManager(new PoolingHttpClientConnectionManager(RegistryBuilder + .create().register("http", PlainConnectionSocketFactory.INSTANCE) + .register("https", new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE)) + .build())); + if (credentials != null) { + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(authScope, credentials); + temp.setDefaultCredentialsProvider(credentialsProvider); + } + if (httpProxy != null) { + temp.setProxy(httpProxy); + } + + return temp.build(); + } + + private void addBody(File body, HttpEntityEnclosingRequest req) { + if (body != null) { + FileEntity fileEntity = new FileEntity(body); + req.setEntity(fileEntity); + } + + } + + private void addBody(byte[] body, HttpEntityEnclosingRequest req) { + if (body != null) { + ByteArrayEntity bodyEntity = new ByteArrayEntity(body, ContentType.APPLICATION_JSON); + req.setEntity(bodyEntity); + } + } + + private void addBody(String body, HttpEntityEnclosingRequest req) { + if (body != null) { + StringEntity bodyEntity = new StringEntity(body, ContentType.APPLICATION_JSON); + req.setEntity(bodyEntity); + } + } + + private void addHeaders(Map headers, HttpRequest req) { + for (Entry entry : headers.entrySet()) { + req.addHeader(entry.getKey(), entry.getValue()); + } + + } + + /** + * Perform a GET request on the URI. 
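	 * (Editorial sketch, not part of the original code: a typical call, with the resolver variable
	 * and URI being assumptions, is
	 * HttpUtils.getInstance(resolver).doGet(new URI("http://localhost:9090/ngsi-ld/v1/entities"));)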
+ * + * @param uri the URI to do the request on + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doGet(URI uri) throws IOException { + return doGet(uri, null, null, null, null); + } + + public String doGet(URI uri, ResponseHandler handler) throws IOException { + return doGet(uri, null, handler); + } + + public String doGet(URI uri, Map headers) throws IOException { + return doGet(uri, headers, null); + } + + public String doGet(URI uri, Map headers, ResponseHandler handler) throws IOException { + return doGet(uri, headers, null, null, handler); + } + + /** + * Perform a GET request on the URI. + * + * @param uri the URI to do the request on + * @param authScope the authentication scope + * @param credentials the authentication credentials + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doGet(URI uri, AuthScope authScope, UsernamePasswordCredentials credentials) throws IOException { + return doGet(uri, null, authScope, credentials, null); + } + + public String doGet(URI uri, AuthScope authScope, UsernamePasswordCredentials credentials, + ResponseHandler handler) throws IOException { + return doGet(uri, null, authScope, credentials, handler); + } + + public String doGet(URI uri, Map headers, AuthScope authScope, + UsernamePasswordCredentials credentials) throws IOException { + return doGet(uri, headers, authScope, credentials, null); + } + + public String doGet(URI uri, Map headers, AuthScope authScope, + UsernamePasswordCredentials credentials, ResponseHandler handler) throws IOException { + if (handler == null) { + return doHTTPRequest(uri, HTTPMethod.GET, null, headers, authScope, credentials); + } + return doHTTPRequest(uri, HTTPMethod.GET, null, headers, authScope, credentials, handler); + } + + /** + * Perform a PUT request on the URI. + * + * @param uri the URI to do the request on + * @param body the request body + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doPut(URI uri, String body) throws IOException { + return doPut(uri, body, null, null); + } + + /** + * Perform a PUT request on the URI. + * + * @param uri the URI to do the request on + * @param body the request body + * @param authScope the authentication scope + * @param credentials the authentication credentials + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doPut(URI uri, String body, AuthScope authScope, UsernamePasswordCredentials credentials) + throws IOException { + return doHTTPRequest(uri, HTTPMethod.PUT, body, null, authScope, credentials); + } + + /** + * Perform a POST request on the URI. 
+ * + * @param uri the URI to do the request on + * @param body the request body + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doPost(URI uri, Object body, Map additionalHeaders) throws IOException { + return doPost(uri, body, additionalHeaders, null, null); + } + + /** + * Perform a POST request on the URI. + * + * @param uri the URI to do the request on + * @param body the request body + * @param authScope the authentication scope + * @param credentials the authentication credentials + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doPost(URI uri, Object body, Map additionalHeaders, AuthScope authScope, + UsernamePasswordCredentials credentials) throws IOException { + return doHTTPRequest(uri, HTTPMethod.POST, body, additionalHeaders, authScope, credentials); + } + + /** + * Perform a DELETE request on the URI. + * + * @param uri the URI to do the request on + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doDelete(URI uri) throws IOException { + return doDelete(uri, null, null); + } + + /** + * Perform a POST request on the URI. + * + * @param uri the URI to do the request on + * @param authScope the authentication scope + * @param credentials the authentication credentials + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doDelete(URI uri, AuthScope authScope, UsernamePasswordCredentials credentials) throws IOException { + return doHTTPRequest(uri, HTTPMethod.DELETE, null, null, authScope, credentials); + } + + /** + * Perform a HEAD request on the URI. + * + * @param uri the URI to do the request on + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doHead(URI uri) throws IOException { + return doHead(uri, null, null); + } + + /** + * Perform a HEAD request on the URI. + * + * @param uri the URI to do the request on + * @param authScope the authentication scope + * @param credentials the authentication credentials + * @return the body of the response + * @throws IOException generally if a communication problem occurs or + * specifically an {@link HttpErrorResponseException} if + * something other than HTTP 200 OK was returned + */ + public String doHead(URI uri, AuthScope authScope, UsernamePasswordCredentials credentials) throws IOException { + return doHTTPRequest(uri, HTTPMethod.HEAD, null, null, authScope, credentials); + } + + /** + * Read and decode the request's body using the encoding specified in the + * request. 
+ * + * @param req the request to read + * @return a String with the body + * @throws IOException if a connection error occurs + */ + public static String getBody(HttpServletRequest req) throws IOException { + BufferedReader reader = null; + try { + reader = req.getReader(); + StringBuilder body = new StringBuilder(); + String line; + while ((line = reader.readLine()) != null) { + body.append(line); + } + + return body.toString(); + } finally { + if (reader != null) { + reader.close(); + } + } + } + + /** + * Read and decode the response's body using the encoding in the response. If no + * encoding is found, UTF-8 is used. + * + * @param res the response object + * @return the body of the response + * @throws IOException if a communication problem occurs + */ + public static String getBody(HttpResponse res) throws IOException { + return EntityUtils.toString(res.getEntity(), "UTF-8"); + } + + /** + * Perform a HEAD request on the remoteURI and return true if the HTTP + * connection succeeds, regardless of the status code (potentially an error) + * received. + * + * @param remoteURI the URI to check + * @return true if the server serving URI responds at all + */ + public boolean isReachable(URI remoteURI) { + try { + doHead(remoteURI); + } catch (HttpErrorResponseException e) { + // That's ok, we got errors, but the server is reachable via the + // network. + return true; + } catch (IOException e) { + // The server is not reachable for whatever reason + return false; + } + // Got a 2xx message, all is good + return true; + } + + private static String getSanitizedHeader(HttpServletRequest req, String header) { + String dirtyHeader = req.getHeader(header); + if (dirtyHeader != null) { + return dirtyHeader.replaceAll("[\r\n]", ""); + } else { + return null; + } + } + + /** + * Enable CORS by setting the appropriate access headers required by pre-flight + * requests (OPTIONS method) and other non-same-origin requests. Note that the + * headers extracted from the request are always (re) URL encoded to prevent + * HTTP Response + * Splitting attacks + * + * @param req the request (needed due to the origin and methods it requires) + * @param resp the response with the right CORS headers + */ + private static void enableCORS(HttpServletRequest req, HttpServletResponse resp) { + // Sanitize the origin + String origin = getSanitizedHeader(req, "Origin"); + if (origin != null) { + resp.addHeader("Access-Control-Allow-Origin", origin); + if ("options".equalsIgnoreCase(req.getMethod())) { + resp.setHeader("Allow", "GET, HEAD, POST, PUT, DELETE, TRACE, OPTIONS"); + String headers = getSanitizedHeader(req, "Access-Control-Request-Headers"); + String method = getSanitizedHeader(req, "Access-Control-Request-Method"); + resp.addHeader("Access-Control-Allow-Methods", method); + resp.addHeader("Access-Control-Allow-Headers", headers); + } + } + // Fix ios6 caching post requests + if ("post".equalsIgnoreCase(req.getMethod())) { + resp.addHeader("Cache-Control", "no-cache"); + } + } + + /** + * Send the given response with a 200 OK status. 
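For orientation, how the servlet-side helpers above are typically called; req stands for the incoming HttpServletRequest and the probe URL is a placeholder:

    // Assumes this runs inside the helper class shown here (getBody is static, isReachable needs an instance).
    String payload = getBody(req);                                   // request body read via req.getReader()
    boolean up = isReachable(URI.create("http://localhost:9090/"));  // true for any HTTP answer, even an error status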
+ * + * @param req the request object + * @param response the response body + * @param resp the response object + */ + public static void sendResponse(HttpServletRequest req, String response, HttpServletResponse resp) { + enableCORS(req, resp); + resp.setContentType(AppConstants.NGB_APPLICATION_JSON); + try { + resp.getWriter().write(response); + } catch (IOException e) { + LOG.warn("IOException when sending a response: " + e.getMessage()); + } + + } + + /** + * Send an error with the given error code as status and the given message as + * reason. The body is HTML formatted and contains the message. + * + * + * @param req the request object + * @param errorCode the status code + * @param message reason for the error + * @param resp the response object + * + */ + public static void sendError(HttpServletRequest req, int errorCode, String message, HttpServletResponse resp) { + enableCORS(req, resp); + try { + LOG.warn(message); + resp.sendError(errorCode, message); + } catch (IOException ioe) { + LOG.warn("IOException when sending an error: " + message); + } + } + + /** + * Get the path out of the URL from this request, trimming out slashes (both at + * the beginning and end) and turning nulls into empty paths. + * + * @param req the request object + * @return the clean path + */ + public static String getCleanPath(HttpServletRequest req) { + String path = req.getPathInfo(); + if (!StringUtils.isSet(path)) { + return ""; + } + + try { + path = URLDecoder.decode(path, "UTF-8"); + } catch (UnsupportedEncodingException e) { + LOG.error("UTF-8 should be supported."); + throw new AssertionError(e); + } + + if (path.endsWith("/")) { + path = path.substring(0, path.length() - 1); + } + if (path.startsWith("/")) { + path = path.substring(1); + } + return path; + } + + /** + * Download the file pointed by the given URI and save it into the target file. + * + * @param uri the URI of the file to download + * @param target local file to store the download into + * @throws IOException if a communication error occurs. Specifically throw an + * {@link HttpErrorResponseException} if anything but a 200 + * OK is returned. + */ + public void downloadFile(URI uri, File target) throws IOException { + downloadFile(uri, target, null, null); + } + + /** + * Download the file pointed by the given URI and save it into the target file. + * + * @param uri the URI of the file to download + * @param target local file to store the download into + * @param authScope the authentication scope + * @param credentials username and password credentials for Basic authentication + * @throws IOException if a communication error occurs. Specifically throw an + * {@link HttpErrorResponseException} if anything but a 200 + * OK is returned. 
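A concrete illustration of what getCleanPath returns, assuming the servlet reports the path info shown in the comment:

    // If req.getPathInfo() returns "/entities/urn%3Angsi-ld%3AVehicle%3AA1/",
    // the helper URL-decodes it and strips the leading and trailing slash:
    String clean = getCleanPath(req); // -> "entities/urn:ngsi-ld:Vehicle:A1"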
+ */ + + public void downloadFile(URI uri, File target, AuthScope authScope, UsernamePasswordCredentials credentials) + throws IOException { + DefaultHttpClient httpClient = new DefaultHttpClient(); + if (httpProxy != null) { + httpClient.getParams().setParameter(ConnRouteParams.DEFAULT_PROXY, httpProxy); + } + if (credentials != null) { + httpClient.getCredentialsProvider().setCredentials(authScope, credentials); + } + + HttpResponse response; + response = httpClient.execute(new HttpGet(uri)); + StatusLine status = response.getStatusLine(); + httpClient.close(); + if (status.getStatusCode() != HttpServletResponse.SC_OK) { + throw new HttpErrorResponseException(status.getStatusCode(), status.getReasonPhrase()); + } + InputStream input = null; + OutputStream output = null; + byte[] buffer = new byte[BUFFER_SIZE]; + + try { + input = response.getEntity().getContent(); + output = new FileOutputStream(target); + + while (true) { + int length = input.read(buffer); + if (length > 0) { + output.write(buffer, 0, length); + } else { + input.close(); + break; + } + } + } finally { + if (output != null) { + output.close(); + } + if (input != null) { + input.close(); + } + } + } + + public static List getAtContext(HttpServletRequest req) { + return parseLinkHeader(req, NGSIConstants.HEADER_REL_LDCONTEXT); + } + + public static List parseLinkHeader(HttpServletRequest req, String headerRelLdcontext) { + return parseLinkHeader(req.getHeaders("Link"), headerRelLdcontext); + } + + public static List parseLinkHeader(Enumeration rawLinks, String headerRelLdcontext) { + + ArrayList result = new ArrayList(); + if (rawLinks == null) { + return result; + } + + while (rawLinks.hasMoreElements()) { + String[] rawLinkInfos = rawLinks.nextElement().split(";"); + boolean isWantedRel = false; + for (String rawLinkInfo : rawLinkInfos) { + if (rawLinkInfo.trim().startsWith("rel=")) { + String[] relInfo = rawLinkInfo.trim().split("="); + if (relInfo.length == 2 && (relInfo[1].equalsIgnoreCase(headerRelLdcontext) + || relInfo[1].equalsIgnoreCase("\"" + headerRelLdcontext + "\""))) { + isWantedRel = true; + } + break; + } + } + if (isWantedRel) { + String rawLink = rawLinkInfos[0]; + if (rawLink.trim().startsWith("<")) { + rawLink = rawLink.substring(rawLink.indexOf("<") + 1, rawLink.indexOf(">")); + } + result.add(rawLink); + } + + } + return result; + } + + public ResponseEntity generateReply(HttpServletRequest request, String reply) throws ResponseException { + return generateReply(request, reply, null); + + } + + public ResponseEntity generateReply(HttpServletRequest request, String reply, + HashMap> additionalHeaders) throws ResponseException { + return generateReply(request, reply, additionalHeaders, null); + } + + public ResponseEntity generateReply(HttpServletRequest request, String reply, + HashMap> additionalHeaders, List additionalContext) throws ResponseException { + return generateReply(request, reply, additionalHeaders, additionalContext, false); + } + + public static void main(String[] args) { + Pattern p = Pattern.compile("([\\w\\/\\+]+)(\\s*\\;\\s*q=(\\d\\.\\d))?"); + Matcher m = p.matcher("*/*"); + float q = 1; + String app = null; + String result = null; + while (m.find()) { + String floatString = m.group(3); + float newQ = 1; + if (floatString != null) { + newQ = Float.parseFloat(floatString); + } + if (result != null && (newQ <= q)) { + continue; + } + app = m.group(0); + if (app.equalsIgnoreCase("application/ld+json") || app.equalsIgnoreCase("application/*") + || app.equalsIgnoreCase("*/*")) { + 
result = "application/ld+json"; + } else if (app.equalsIgnoreCase("application/json")) { + result = "application/json"; + } + + } + + } + + private int parseAcceptHeader(Enumeration acceptHeaders) { + float q = 1; + int appGroup = -1; + + while (acceptHeaders.hasMoreElements()) { + String header = acceptHeaders.nextElement(); + + Matcher m = headerPattern.matcher(header.toLowerCase()); + while (m.find()) { + String floatString = m.group(8); + float newQ = 1; + int newAppGroup = -2; + if (floatString != null) { + newQ = Float.parseFloat(floatString); + } + if (appGroup != -1 && (newQ < q)) { + continue; + } + for (int i = 2; i <= 6; i++) { + if (m.group(i) == null) { + continue; + } + newAppGroup = i; + break; + } + if (newAppGroup > appGroup) { + appGroup = newAppGroup; + } + } + } + switch (appGroup) { + case 2: + case 3: + case 5: + return 2; // application/ld+json + case 4: + return 1; // application/json + case 6: + return 3;// application/n-quads + default: + return -1;// error + } + } + + public ResponseEntity generateReply(HttpServletRequest request, String reply, + HashMap> additionalHeaders, List additionalContext, boolean forceArrayResult) + throws ResponseException { + List requestAtContext = getAtContext(request); + if (additionalContext != null) { + requestAtContext.addAll(additionalContext); + } + + String replyBody; + + CompactedJson compacted = contextResolver.compact(reply, requestAtContext); + ArrayList temp = new ArrayList(); + if (additionalHeaders == null) { + additionalHeaders = new HashMap>(); + } + int sendingContentType = parseAcceptHeader(request.getHeaders(HttpHeaders.ACCEPT)); + switch (sendingContentType) { + case 1: + temp.add(AppConstants.NGB_APPLICATION_JSON); + replyBody = compacted.getCompacted(); + List links = additionalHeaders.get(HttpHeaders.LINK); + if (links == null) { + links = new ArrayList(); + additionalHeaders.put(HttpHeaders.LINK, links); + } + links.add("<" + compacted.getContextUrl() + + ">; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\""); + break; + case 2: + temp.add(AppConstants.NGB_APPLICATION_JSONLD); + if (compacted.getCompacted() == null || compacted.getCompacted().isEmpty() + || compacted.getCompacted().trim().equals("{ }") || compacted.getCompacted().trim().equals("{}")) { + replyBody = "{ }"; + } else { + replyBody = compacted.getCompactedWithContext(); + } + break; + case 3: + temp.add(AppConstants.NGB_APPLICATION_NQUADS); + replyBody = contextResolver.getRDF(reply); + break; + case -1: + default: + throw new ResponseException(ErrorType.InvalidRequest, "Provided accept types are not supported"); + } + + additionalHeaders.put(HttpHeaders.CONTENT_TYPE, temp); + if (forceArrayResult && !replyBody.startsWith("[")) { + if (replyBody.equals("{ }") || replyBody.equals("{}")) { + replyBody = "[]"; + } else { + replyBody = "[" + replyBody + "]"; + } + } + boolean compress = false; + String options = request.getParameter(NGSIConstants.QUERY_PARAMETER_OPTIONS); + if (options != null && options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_COMPRESS)) { + compress = true; + } + return generateReply(replyBody, additionalHeaders, compress); + } + + public ResponseEntity generateReply(String replyBody, HashMap> additionalHeaders, + boolean compress) { + return generateReply(replyBody, additionalHeaders, HttpStatus.OK, compress); + } + + public ResponseEntity generateReply(String replyBody, HashMap> additionalHeaders, + HttpStatus status, boolean compress) { + BodyBuilder builder = ResponseEntity.status(status); + if 
(additionalHeaders != null) { + for (Entry> entry : additionalHeaders.entrySet()) { + for (String value : entry.getValue()) { + builder.header(entry.getKey(), value); + } + + } + } + byte[] body; + if (compress) { + body = zipResult(replyBody); + builder.header(HttpHeaders.CONTENT_TYPE, "application/zip"); + } else { + body = replyBody.getBytes(); + } + return builder.body(body); + } + + private byte[] zipResult(String replyBody) { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + ZipOutputStream zipOutputStream = new ZipOutputStream(baos); + ZipEntry entry = new ZipEntry("index.json"); + entry.setSize(replyBody.length()); + try { + zipOutputStream.putNextEntry(entry); + zipOutputStream.write(replyBody.getBytes()); + zipOutputStream.closeEntry(); + zipOutputStream.close(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + return baos.toByteArray(); + } + + // public static ResponseEntity generateReply(String acceptHeader, + // List contextLinks, + // String expandedJson, ContextResolverBasic contextResolver, String + // atContextServerUrl) + // throws ResponseException { + // CompactedJson simplified = contextResolver.compact(expandedJson, + // contextLinks); + // BodyBuilder builder = ResponseEntity.status(HttpStatus.OK); + // ResponseEntity result; + // if ("application/json".equalsIgnoreCase(acceptHeader)) { + // builder = builder.contentType(MediaType.APPLICATION_JSON); + // + // String[] links = new String[contextLinks.size()]; + // Object[] contextLinksArray = contextLinks.toArray(); + // for (int i = 0; i < contextLinksArray.length; i++) { + // links[i] = (String) contextLinksArray[i]; + // } + // + // builder = builder.header("Link", links); + // + // result = builder.body(simplified.getCompacted()); + // } else { + // builder = builder.header("Content-Type", "application/ld+json"); + // + // result = builder.body(simplified.getCompactedWithContext()); + // } + // return result; + // } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/MicroServiceUtils.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/MicroServiceUtils.java new file mode 100644 index 0000000000000000000000000000000000000000..bfe2d631ca1a6e472596c1e21b08377d1a2e7b4c --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/MicroServiceUtils.java @@ -0,0 +1,47 @@ +package eu.neclab.ngsildbroker.commons.tools; + +import java.net.URI; +import java.net.URISyntaxException; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import com.netflix.appinfo.InstanceInfo; +import com.netflix.discovery.EurekaClient; +import com.netflix.discovery.shared.Application; + +public class MicroServiceUtils { + private final static Logger logger = LogManager.getLogger(MicroServiceUtils.class); + + public static String getResourceURL(EurekaClient eurekaClient, String resource) { + logger.trace("getSubscriptionResourceURL() :: started"); + Application application = eurekaClient.getApplication("gateway"); + InstanceInfo instanceInfo = application.getInstances().get(0); + // TODO : search for a better way to resolve http or https + String hostIP = instanceInfo.getIPAddr(); + int port = instanceInfo.getPort(); + StringBuilder url = new StringBuilder("http://").append(hostIP).append(":").append(port) + .append(resource); + // System.out.println("URL : "+url.toString()); + logger.trace("getSubscriptionResourceURL() :: completed"); + return 
url.toString(); + } + + public static URI getGatewayURL(EurekaClient eurekaClient) { + logger.trace("getGatewayURL() :: started"); + Application application = eurekaClient.getApplication("gateway"); + InstanceInfo instanceInfo = application.getInstances().get(0); + // TODO : search for a better way to resolve http or https + String hostIP = instanceInfo.getIPAddr(); + int port = instanceInfo.getPort(); + StringBuilder url = new StringBuilder("http://").append(hostIP).append(":").append(port); + // System.out.println("URL : "+url.toString()); + logger.trace("getGatewayURL() :: completed"); + try { + return new URI(url.toString()); + } catch (URISyntaxException e) { + throw new AssertionError("something went really wrong here when creating a URL... this should never happen but did with " + url.toString()); + } + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/SerializationTools.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/SerializationTools.java new file mode 100644 index 0000000000000000000000000000000000000000..7e823eb8583442bbb4a368d235dbcff665e0faac --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/SerializationTools.java @@ -0,0 +1,668 @@ +package eu.neclab.ngsildbroker.commons.tools; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.text.ParseException; +import java.text.SimpleDateFormat; +import java.time.Instant; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.function.Consumer; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.filosganga.geogson.gson.GeometryAdapterFactory; +import com.github.filosganga.geogson.model.Geometry; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParseException; +import com.google.gson.JsonPrimitive; +import com.google.gson.JsonSerializationContext; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoProperty; +import eu.neclab.ngsildbroker.commons.datatypes.GeoPropertyEntry; +import eu.neclab.ngsildbroker.commons.datatypes.Property; +import eu.neclab.ngsildbroker.commons.datatypes.PropertyEntry; +import eu.neclab.ngsildbroker.commons.datatypes.Relationship; +import eu.neclab.ngsildbroker.commons.datatypes.RelationshipEntry; +import eu.neclab.ngsildbroker.commons.datatypes.TypedValue; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; + +public class SerializationTools { +// public static SimpleDateFormat formatter = new SimpleDateFormat(NGSIConstants.DEFAULT_DATE_FORMAT); + private static DateTimeFormatter informatter = DateTimeFormatter + .ofPattern(NGSIConstants.ALLOWED_IN_DEFAULT_DATE_FORMAT).withZone(ZoneId.of("Z"));//.withZone(ZoneId.systemDefault()); + public static DateTimeFormatter formatter = DateTimeFormatter + .ofPattern(NGSIConstants.ALLOWED_OUT_DEFAULT_DATE_FORMAT).withZone(ZoneId.of("Z"));//systemDefault()); +// 
public static SimpleDateFormat forgivingFormatter = new SimpleDateFormat( +// NGSIConstants.DEFAULT_FORGIVING_DATE_FORMAT); + + public static Gson geojsonGson = new GsonBuilder().registerTypeAdapterFactory(new GeometryAdapterFactory()) + .create(); + + public static GeoProperty parseGeoProperty(JsonArray topLevelArray, String key) { + GeoProperty prop = new GeoProperty(); + try { + prop.setId(new URI(key)); + } catch (URISyntaxException e) { + throw new JsonParseException(e); + } + prop.setType(NGSIConstants.NGSI_LD_GEOPROPERTY); + Iterator it = topLevelArray.iterator(); + HashMap entries = new HashMap(); + while (it.hasNext()) { + JsonObject next = (JsonObject) it.next(); + ArrayList properties = new ArrayList(); + ArrayList relationships = new ArrayList(); + Long createdAt = null, observedAt = null, modifiedAt = null; + String geoValueStr = null; + Geometry geoValue = null; + String dataSetId = null; + String unitCode = null; + String name = null; + for (Entry entry : next.entrySet()) { + String propKey = entry.getKey(); + JsonElement value = entry.getValue(); + if (propKey.equals(NGSIConstants.NGSI_LD_HAS_VALUE)) { + JsonElement propValue = value.getAsJsonArray().get(0); + if (propValue.isJsonPrimitive()) { + JsonPrimitive primitive = propValue.getAsJsonPrimitive(); + if (primitive.isString()) { + geoValueStr = primitive.getAsString(); + geoValue = DataSerializer.getGeojsonGeometry(primitive.getAsString()); + } + } else { + JsonElement atValue = propValue.getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE); + if (atValue != null) { + if (atValue.isJsonPrimitive()) { + JsonPrimitive primitive = atValue.getAsJsonPrimitive(); + if (primitive.isString()) { + geoValueStr = primitive.getAsString(); + geoValue = DataSerializer.getGeojsonGeometry(primitive.getAsString()); + } + + } else { + geoValueStr = atValue.getAsString(); + geoValue = DataSerializer.getGeojsonGeometry(atValue.getAsString()); + } + } else { + geoValueStr = propValue.toString(); + geoValue = DataSerializer.getGeojsonGeometry(propValue.toString()); + } + + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + try { + observedAt = date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_CREATED_AT)) { + try { + createdAt = date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + try { + modifiedAt = date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + + } else if (propKey.equals(NGSIConstants.JSON_LD_TYPE)) { + continue; + } else if (propKey.equals(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + dataSetId = getDataSetId(value); + } else if (propKey.equals(NGSIConstants.NGSI_LD_NAME)) { + name = getName(value); + } else { + JsonArray subLevelArray = value.getAsJsonArray(); + JsonObject objValue = subLevelArray.get(0).getAsJsonObject(); + if (objValue.has(NGSIConstants.JSON_LD_TYPE)) { + String valueType = objValue.get(NGSIConstants.JSON_LD_TYPE).getAsJsonArray().get(0) + .getAsString(); + if (valueType.equals(NGSIConstants.NGSI_LD_PROPERTY)) { + properties.add(parseProperty(subLevelArray, propKey)); + } else if 
(valueType.equals(NGSIConstants.NGSI_LD_RELATIONSHIP)) { + relationships.add(parseRelationship(subLevelArray, propKey)); + } + } else { + throw new JsonParseException( + "cannot determine type of sub attribute. please provide a valid type"); + } + } + + } + GeoPropertyEntry geoPropEntry = new GeoPropertyEntry(dataSetId, geoValueStr, geoValue); + geoPropEntry.setProperties(properties); + geoPropEntry.setRelationships(relationships); + geoPropEntry.setCreatedAt(createdAt); + geoPropEntry.setObservedAt(observedAt); + geoPropEntry.setModifiedAt(modifiedAt); + geoPropEntry.setName(name); + entries.put(geoPropEntry.getDataSetId(), geoPropEntry); + } + prop.setEntries(entries); + return prop; + } + + @SuppressWarnings("unchecked") + public static Property parseProperty(JsonArray topLevelArray, String key) { + Property prop = new Property(); + try { + prop.setId(new URI(key)); + } catch (URISyntaxException e) { + throw new JsonParseException(e); + } + prop.setType(NGSIConstants.NGSI_LD_PROPERTY); + Iterator it = topLevelArray.iterator(); + HashMap entries = new HashMap(); + while (it.hasNext()) { + JsonObject next = (JsonObject) it.next(); + ArrayList properties = new ArrayList(); + ArrayList relationships = new ArrayList(); + Long createdAt = null, observedAt = null, modifiedAt = null; + Object propValue = null; + String dataSetId = null; + String unitCode = null; + String name = null; + for (Entry entry : next.entrySet()) { + String propKey = entry.getKey(); + JsonElement value = entry.getValue(); + if (propKey.equals(NGSIConstants.NGSI_LD_HAS_VALUE)) { + propValue = getHasValue(value); + + } else if (propKey.equals(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + try { + observedAt = date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_CREATED_AT)) { + try { + createdAt = date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + try { + modifiedAt = date2Long(value.getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString()); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.JSON_LD_TYPE)) { + continue; + } else if (propKey.equals(NGSIConstants.NGSI_LD_INSTANCE_ID)) { + continue; + } else if (propKey.equals(NGSIConstants.NGSI_LD_UNIT_CODE)) { + unitCode = getUnitCode(value); + } else if (propKey.equals(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + dataSetId = getDataSetId(value); + } else if (propKey.equals(NGSIConstants.NGSI_LD_NAME)) { + name = getName(value); + } else { + JsonArray subLevelArray = value.getAsJsonArray(); + JsonObject objValue = subLevelArray.get(0).getAsJsonObject(); + if (objValue.has(NGSIConstants.JSON_LD_TYPE)) { + String valueType = objValue.get(NGSIConstants.JSON_LD_TYPE).getAsJsonArray().get(0) + .getAsString(); + if (valueType.equals(NGSIConstants.NGSI_LD_PROPERTY)) { + properties.add(parseProperty(subLevelArray, propKey)); + } else if (valueType.equals(NGSIConstants.NGSI_LD_RELATIONSHIP)) { + relationships.add(parseRelationship(subLevelArray, propKey)); + } + } else { + throw new JsonParseException( + "cannot determine type of sub attribute. 
please provide a valid type"); + } + } + + } + if (propValue == null) { + throw new JsonParseException("Values cannot be null"); + } + PropertyEntry propEntry = new PropertyEntry(dataSetId, propValue); + propEntry.setProperties(properties); + propEntry.setRelationships(relationships); + propEntry.setCreatedAt(createdAt); + propEntry.setObservedAt(observedAt); + propEntry.setModifiedAt(modifiedAt); + propEntry.setName(name); + propEntry.setUnitCode(unitCode); + entries.put(propEntry.getDataSetId(), propEntry); + + } + prop.setEntries(entries); + return prop; + } + + private static String getName(JsonElement value) { + // TODO Auto-generated method stub + return null; + } + + private static String getUnitCode(JsonElement value) { + // TODO Auto-generated method stub + return null; + } + + @SuppressWarnings("unchecked") + private static Object getHasValue(JsonElement element) { + if (element.isJsonArray()) { + JsonArray array = element.getAsJsonArray(); + ArrayList result = new ArrayList(); + array.forEach(new Consumer() { + @Override + public void accept(JsonElement t) { + result.add(getHasValue(t)); + + } + }); + return result; + } else if (element.isJsonObject()) { + JsonObject jsonObj = element.getAsJsonObject(); + if (jsonObj.has(NGSIConstants.JSON_LD_VALUE) && jsonObj.has(NGSIConstants.JSON_LD_TYPE)) { + Object objValue; + JsonPrimitive atValue = jsonObj.get(NGSIConstants.JSON_LD_VALUE).getAsJsonPrimitive(); + if (atValue.isJsonNull()) { + throw new JsonParseException("Values cannot be null"); + } + if (atValue.isBoolean()) { + objValue = atValue.getAsBoolean(); + } else if (atValue.isNumber()) { + objValue = atValue.getAsDouble(); + } else if (atValue.isString()) { + objValue = atValue.getAsString(); + } else { + objValue = jsonObj.get(NGSIConstants.JSON_LD_VALUE).getAsString(); + } + + return new TypedValue(jsonObj.get(NGSIConstants.JSON_LD_TYPE).getAsString(), objValue); + } + if (jsonObj.has(NGSIConstants.JSON_LD_VALUE)) { + JsonPrimitive atValue = jsonObj.get(NGSIConstants.JSON_LD_VALUE).getAsJsonPrimitive(); + if (atValue.isJsonNull()) { + throw new JsonParseException("Values cannot be null"); + } + if (atValue.isBoolean()) { + return atValue.getAsBoolean(); + } + if (atValue.isNumber()) { + return atValue.getAsDouble(); + } + if (atValue.isString()) { + return atValue.getAsString(); + } + + return jsonObj.get(NGSIConstants.JSON_LD_VALUE).getAsString(); + } else { + HashMap> result = new HashMap>(); + for (Entry entry : jsonObj.entrySet()) { + result.put(entry.getKey(), (List) getHasValue(entry.getValue())); + } + return result; + } + + } else { + // should never be the case... 
but just in case store the element as string + // representation + ArrayList result = new ArrayList(); + result.add(element.getAsString()); + return result; + } + + } + + public static JsonArray getValueArray(Integer value) { + return getValueArray(new JsonPrimitive(value)); + } + + public static JsonArray getValueArray(String value) { + return getValueArray(new JsonPrimitive(value)); + } + + public static JsonArray getValueArray(Long value) { + return getValueArray(new JsonPrimitive(value)); + } + + public static JsonArray getValueArray(Double value) { + return getValueArray(new JsonPrimitive(value)); + } + + public static JsonArray getValueArray(Float value) { + return getValueArray(new JsonPrimitive(value)); + } + + public static JsonArray getValueArray(Boolean value) { + return getValueArray(new JsonPrimitive(value)); + } + + private static JsonArray getValueArray(JsonElement value) { + JsonArray result = new JsonArray(); + JsonObject temp = new JsonObject(); + temp.add(NGSIConstants.JSON_LD_VALUE, value); + result.add(temp); + return result; + } + + public static Long date2Long(String dateString) throws Exception { + return Instant.from(informatter.parse(dateString)).toEpochMilli(); + + } + + private static Long getTimestamp(JsonElement value) throws Exception { + return date2Long( + value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString()); + + } + + public static Relationship parseRelationship(JsonArray topLevelArray, String key) { + Relationship relationship = new Relationship(); + relationship.setType(NGSIConstants.NGSI_LD_RELATIONSHIP); + HashMap entries = new HashMap(); + try { + relationship.setId(new URI(key)); + } catch (URISyntaxException e) { + throw new JsonParseException("The Id has to be a URI"); + } + + Iterator it = topLevelArray.iterator(); + while (it.hasNext()) { + JsonObject next = (JsonObject) it.next(); + ArrayList properties = new ArrayList(); + ArrayList relationships = new ArrayList(); + Long createdAt = null, observedAt = null, modifiedAt = null; + URI relObj = null; + String dataSetId = null; + String name = null; + for (Entry entry : next.entrySet()) { + String propKey = entry.getKey(); + JsonElement value = entry.getValue(); + + if (propKey.equals(NGSIConstants.NGSI_LD_HAS_OBJECT)) { + if (value.getAsJsonArray().size() != 1) { + throw new JsonParseException("Relationships have to have exactly one object"); + } + try { + relObj = new URI(value.getAsJsonArray().get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_ID) + .getAsString()); + } catch (URISyntaxException e) { + throw new JsonParseException("Relationships have to be a URI"); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + + try { + observedAt = getTimestamp(value); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_CREATED_AT)) { + try { + createdAt = getTimestamp(value); + } catch (Exception e) { + throw new JsonParseException(e); + } + } else if (propKey.equals(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + try { + modifiedAt = getTimestamp(value); + } catch (Exception e) { + throw new JsonParseException(e); + } + + } else if (propKey.equals(NGSIConstants.JSON_LD_TYPE)) { + continue; + } else if (propKey.equals(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + dataSetId = getDataSetId(value); + } else if (propKey.equals(NGSIConstants.NGSI_LD_NAME)) { + name = getName(value); + } else { + JsonArray subLevelArray = value.getAsJsonArray(); + JsonObject objValue = 
subLevelArray.get(0).getAsJsonObject(); + if (objValue.has(NGSIConstants.JSON_LD_TYPE)) { + String valueType = objValue.get(NGSIConstants.JSON_LD_TYPE).getAsJsonArray().get(0) + .getAsString(); + if (valueType.equals(NGSIConstants.NGSI_LD_PROPERTY)) { + properties.add(parseProperty(subLevelArray, propKey)); + } else if (valueType.equals(NGSIConstants.NGSI_LD_RELATIONSHIP)) { + relationships.add(parseRelationship(subLevelArray, propKey)); + } + } else { + throw new JsonParseException( + "cannot determine type of sub attribute. please provide a valid type"); + } + } + + } + if (relObj == null) { + throw new JsonParseException("Relationships have to have exactly one object"); + } + RelationshipEntry object = new RelationshipEntry(dataSetId, relObj); + object.setProperties(properties); + object.setRelationships(relationships); + object.setCreatedAt(createdAt); + object.setObservedAt(observedAt); + object.setModifiedAt(modifiedAt); + object.setName(name); + entries.put(object.getDataSetId(), object); + } + relationship.setObjects(entries); + return relationship; + } + + private static String getDataSetId(JsonElement value) { + // TODO Auto-generated method stub + return null; + } + + public static JsonElement getJson(Long timestamp, JsonSerializationContext context) { + JsonArray observedArray = new JsonArray(); + JsonObject observedObj = new JsonObject(); + observedObj.add(NGSIConstants.JSON_LD_VALUE, + context.serialize(formatter.format(Instant.ofEpochMilli(timestamp)))); + observedObj.add(NGSIConstants.JSON_LD_TYPE, context.serialize(NGSIConstants.NGSI_LD_DATE_TIME)); + observedArray.add(observedObj); + return observedArray; + } + + public static JsonElement getJson(Geometry geojsonGeometry) { + return new JsonPrimitive(geojsonGson.toJson(geojsonGeometry)); + } + + /** + * + * @param timestamp + * @param context + * @param type - to indicate which type of serialization is required For ex + * (in Entity payload). + * + * createdAt must be serialized as : + * "https://uri.etsi.org/ngsi-ld/createdAt": [{ "@type": + * ["https://uri.etsi.org/ngsi-ld/Property"], + * "https://uri.etsi.org/ngsi-ld/hasValue": [{ "@value": + * "2017-07-29T12:00:04" }] }] + * + * whereas observedAt must be serialized as : "http: + * //uri.etsi.org/ngsi-ld/observedAt": [{ + * @value: "2017-07-29T12:00:04", + * @type: "https://uri.etsi.org/ngsi-ld/DateTime" }] + * + * although both are same(Long/Timestamp) but they need to serialize + * differently. serializaton mst be of type : + * + * @return JsonElement + */ + // TODO : How type will be decided from Entity class variables. 
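The timestamp helpers defined above carry these serializations: date2Long parses the incoming ISO string into epoch milliseconds, and formatter renders it back when the expanded form is built. A small round trip, using the date value that also appears in the unit tests further down (Instant is java.time.Instant):

    try {
        Long millis = SerializationTools.date2Long("2019-04-26T12:23:55Z");             // parse with the "in" pattern
        String iso = SerializationTools.formatter.format(Instant.ofEpochMilli(millis)); // render with the "out" pattern
        System.out.println(millis + " -> " + iso);
    } catch (Exception e) { // date2Long is declared to throw Exception
        e.printStackTrace();
    }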
+ + public static JsonElement getJson(Property property, JsonSerializationContext context) { + JsonArray result = new JsonArray(); + HashMap entries = property.getEntries(); + for (PropertyEntry entry : entries.values()) { + JsonObject top = new JsonObject(); + JsonArray type = new JsonArray(); + type.add(new JsonPrimitive(entry.getType())); + top.add(NGSIConstants.JSON_LD_TYPE, type); + + top.add(NGSIConstants.NGSI_LD_HAS_VALUE, getJson(entry.getValue(), context)); + if (entry.getObservedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_OBSERVED_AT, getJson(entry.getObservedAt(), context)); + } + if (entry.getCreatedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_CREATED_AT, getJson(entry.getCreatedAt(), context)); + } + if (entry.getModifiedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_MODIFIED_AT, getJson(entry.getModifiedAt(), context)); + } + for (Property propOfProp : entry.getProperties()) { + top.add(propOfProp.getId().toString(), getJson(propOfProp, context)); + } + for (Relationship relaOfProp : entry.getRelationships()) { + top.add(relaOfProp.getId().toString(), getJson(relaOfProp, context)); + } + result.add(top); + } + + return result; + } + + @SuppressWarnings("unchecked") + private static JsonElement getJson(Object value, JsonSerializationContext context) { + JsonElement result; + if (value instanceof Map) { + return getComplexValue((Map>) value, context); + } else if (value instanceof List) { + List myList = (List) value; + JsonArray myArray = new JsonArray(); + for (Object object : myList) { + myArray.add(getJson(object, context)); + } + result = myArray; + } else { + + if (value instanceof TypedValue) { + return context.serialize(value); + } + result = new JsonObject(); + ((JsonObject) result).add(NGSIConstants.JSON_LD_VALUE, context.serialize(value)); + + } + + return result; + } + + private static JsonObject getComplexValue(Map> value, JsonSerializationContext context) { + JsonObject top = new JsonObject(); + for (Entry> entry : value.entrySet()) { + top.add(entry.getKey(), getJson(entry.getValue(), context)); + } + return top; + } + + public static JsonElement getJson(Relationship relationship, JsonSerializationContext context) { + JsonArray result = new JsonArray(); + + for (RelationshipEntry entry : relationship.getEntries().values()) { + JsonObject top = new JsonObject(); + JsonArray type = new JsonArray(); + type.add(new JsonPrimitive(entry.getType())); + top.add(NGSIConstants.JSON_LD_TYPE, type); + JsonArray value = new JsonArray(); + JsonObject objValue = new JsonObject(); + objValue.add(NGSIConstants.JSON_LD_ID, context.serialize(entry.getObject())); + value.add(objValue); + top.add(NGSIConstants.NGSI_LD_HAS_OBJECT, value); + if (entry.getObservedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_OBSERVED_AT, getJson(entry.getObservedAt(), context)); + } + if (entry.getCreatedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_CREATED_AT, getJson(entry.getCreatedAt(), context)); + } + if (entry.getModifiedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_MODIFIED_AT, getJson(entry.getModifiedAt(), context)); + } + for (Property propOfProp : entry.getProperties()) { + top.add(propOfProp.getId().toString(), getJson(propOfProp, context)); + } + for (Relationship relaOfProp : entry.getRelationships()) { + top.add(relaOfProp.getId().toString(), getJson(relaOfProp, context)); + } + result.add(top); + + } + return result; + } + + public static JsonElement getJson(GeoProperty property, JsonSerializationContext context) { + JsonArray result = new JsonArray(); + for (GeoPropertyEntry entry : 
property.getEntries().values()) { + JsonObject top = new JsonObject(); + JsonArray type = new JsonArray(); + type.add(new JsonPrimitive(entry.getType())); + top.add(NGSIConstants.JSON_LD_TYPE, type); + JsonArray value = new JsonArray(); + JsonObject objValue = new JsonObject(); + objValue.add(NGSIConstants.JSON_LD_VALUE, context.serialize(entry.getValue())); + value.add(objValue); + top.add(NGSIConstants.NGSI_LD_HAS_VALUE, value); + if (entry.getObservedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_OBSERVED_AT, getJson(entry.getObservedAt(), context)); + } + if (entry.getCreatedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_CREATED_AT, getJson(entry.getCreatedAt(), context)); + } + if (entry.getModifiedAt() > 0) { + top.add(NGSIConstants.NGSI_LD_MODIFIED_AT, getJson(entry.getModifiedAt(), context)); + } + for (Property propOfProp : entry.getProperties()) { + top.add(propOfProp.getId().toString(), getJson(propOfProp, context)); + } + for (Relationship relaOfProp : entry.getRelationships()) { + top.add(relaOfProp.getId().toString(), getJson(relaOfProp, context)); + } + result.add(top); + + } + return result; + } + + /* + * public static JsonElement getJsonForCSource(GeoProperty geoProperty, + * JsonSerializationContext context) { Gson gson = new Gson(); return + * gson.fromJson(geoProperty.getValue(), JsonElement.class); } + */ + public static JsonNode parseJson(ObjectMapper objectMapper, String payload) throws ResponseException { + JsonNode json = null; + try { + json = objectMapper.readTree(payload); + if (json.isNull()) { + throw new ResponseException(ErrorType.InvalidRequest); + } + } catch (JsonParseException e) { + throw new ResponseException(ErrorType.InvalidRequest); + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + return json; + } + +} diff --git a/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/StringUtils.java b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/StringUtils.java new file mode 100644 index 0000000000000000000000000000000000000000..938098db1682cd348546c97086d33c4f8245cc6d --- /dev/null +++ b/scorpio-broker/Commons/src/main/java/eu/neclab/ngsildbroker/commons/tools/StringUtils.java @@ -0,0 +1,114 @@ +package eu.neclab.ngsildbroker.commons.tools; + + +public final class StringUtils { + + private StringUtils() { + // Prevent instantiation + } + + /** + * Construct a string in the form of "x hour(s) y minute(s) z minute(s)" + * from a time duration in milliseconds. Some examples of possible output :
+ *
+ *   - 1 hour 2 minutes
+ *   - 1 hour 2 minutes 1 second
+ *   - 3 hours 1 minute 2 seconds
+ *   - less than a second
+ *   - 4 hours
+ *   - 5 hours 1 second
+ * + * @param msTime + * time in milliseconds + * @return the formatted time string + */ + public static String formatDurationInMs(long msTime) { + int sTime = (int) msTime / 1000; + int seconds = sTime % 60; + int minutes = (sTime % 3600) / 60; + int hours = sTime / 3600; + StringBuilder sBuilder = new StringBuilder(); + + if (hours > 1) { + sBuilder.append(hours + " hours "); + } else if (hours == 1) { + sBuilder.append("1 hour "); + } + + if (minutes > 1) { + sBuilder.append(minutes + " minutes "); + } else if (minutes == 1) { + sBuilder.append("1 minute "); + } + + if (seconds > 1) { + sBuilder.append(seconds + " seconds "); + } else if (seconds == 1) { + sBuilder.append("1 second "); + } else if (sBuilder.length() == 0) { + sBuilder.append("less than a second "); + } + + return sBuilder.toString().trim(); + } + + /** + * Check if a string has value and is not an empty string. + * + * @param input + * the string to check + * @return true if it's set and not empty + */ + public static boolean isSet(String input) { + return input != null && !(input.isEmpty() || input.trim().isEmpty()); + } + + /** + * Count how many a char appears in a string. It returns 0 if the string is + * not set. + * + * @param text + * string to search + * @param someCharacter + * character to count + * @return the number of times the char appears in the text + */ + public static int countOccurrences(String text, char someCharacter) { + if (!isSet(text)) { + return 0; + } + int count = 0; + for (int i = 0; i < text.length(); i++) { + if (text.charAt(i) == someCharacter) { + count++; + } + } + return count; + } + + /** + * Apply trimming any string on a string. + * + * @param source + * String source + * @param removed + * Removed string + * @return String which is not started and ended with a removed string. 
+ */ + public static String trimWithString(String source, String removed) { + if (!isSet(removed) || !isSet(removed)) { + return source; + } + String ret = source; + while (ret.startsWith(removed)) { + ret = ret.substring(removed.length()); + } + while (ret.endsWith(removed)) { + ret = ret.substring(0, ret.length() - removed.length()); + } + + return ret; + } + +} diff --git a/scorpio-broker/Commons/src/test/java/eu/neclab/ngsildbroker/commons/ngsiqueries/test/QueryParserTests.java b/scorpio-broker/Commons/src/test/java/eu/neclab/ngsildbroker/commons/ngsiqueries/test/QueryParserTests.java new file mode 100644 index 0000000000000000000000000000000000000000..a063f39bd34e7f215189beaa0a2ec49086fa17bf --- /dev/null +++ b/scorpio-broker/Commons/src/test/java/eu/neclab/ngsildbroker/commons/ngsiqueries/test/QueryParserTests.java @@ -0,0 +1,252 @@ +package eu.neclab.ngsildbroker.commons.ngsiqueries.test; + +import static org.junit.Assert.assertEquals; + +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +import eu.neclab.ngsildbroker.commons.datatypes.QueryTerm; +import eu.neclab.ngsildbroker.commons.exceptions.BadRequestException; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; + +public class QueryParserTests { + + private static QueryParser parser; + + @BeforeClass + public static void setup() { + parser = new QueryParser(); + } + + @Test + public void testSingleQuery() { + String attribName = "test1"; + String operator = "=="; + String operant = "\"teststring\""; + String q = attribName + operator + operant; + QueryTerm expected = new QueryTerm(null, null); + expected.setAttribute(attribName); + expected.setOperator(operator); + expected.setOperant(operant); + + QueryTerm actual; + try { + actual = parser.parseQuery(q, null); + assertEquals(expected, actual); + } catch (BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + + } + + @Test + public void testMultiAndQuery() { + String attribName1 = "test1"; + String operator1 = "=="; + String operant1 = "\"teststring\""; + + String attribName2 = "test1"; + String operator2 = ">="; + String operant2 = "12345"; + + QueryTerm expected = new QueryTerm(null, null); + expected.setAttribute(attribName1); + expected.setOperator(operator1); + expected.setOperant(operant1); + expected.setNextAnd(true); + + QueryTerm expectedNext = new QueryTerm(null, null); + expectedNext.setAttribute(attribName2); + expectedNext.setOperator(operator2); + expectedNext.setOperant(operant2); + expected.setNext(expectedNext); + + String q = attribName1 + operator1 + operant1 + ";" + attribName2 + operator2 + operant2; + try { + QueryTerm actual = parser.parseQuery(q, null); + assertEquals(expected, actual); + } catch (BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testMultiOrQuery() { + String attribName1 = "test1"; + String operator1 = "=="; + String operant1 = "\"teststring\""; + + String attribName2 = "test2"; + String operator2 = ">="; + String operant2 = "12345"; + + QueryTerm expected = new QueryTerm(null, null); + expected.setAttribute(attribName1); + expected.setOperator(operator1); + expected.setOperant(operant1); + expected.setNextAnd(false); + + QueryTerm expectedNext = new QueryTerm(null, null); + expectedNext.setAttribute(attribName2); + expectedNext.setOperator(operator2); + expectedNext.setOperant(operant2); + expected.setNext(expectedNext); + + String q = attribName1 + operator1 + operant1 + "|" + attribName2 + operator2 + operant2; + try { + 
QueryTerm actual = parser.parseQuery(q, null); + assertEquals(expected, actual); + } catch (BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testMultiMixedAndOrQuery() { + String attribName1 = "test1"; + String operator1 = "=="; + String operant1 = "\"teststring\""; + + String attribName2 = "test2"; + String operator2 = ">="; + String operant2 = "12345"; + + String attribName3 = "test3"; + String operator3 = "!="; + String operant3 = "\"testst123ring\""; + + String attribName4 = "test4"; + String operator4 = "<="; + String operant4 = "12345"; + + QueryTerm expected = new QueryTerm(null, null); + expected.setAttribute(attribName1); + expected.setOperator(operator1); + expected.setOperant(operant1); + expected.setNextAnd(false); + + QueryTerm expectedNext = new QueryTerm(null, null); + expectedNext.setAttribute(attribName2); + expectedNext.setOperator(operator2); + expectedNext.setOperant(operant2); + expected.setNext(expectedNext); + + QueryTerm expectedNext1 = new QueryTerm(null, null); + expectedNext1.setAttribute(attribName3); + expectedNext1.setOperator(operator3); + expectedNext1.setOperant(operant3); + expectedNext.setNext(expectedNext1); + expectedNext.setNextAnd(true); + + QueryTerm expectedNext2 = new QueryTerm(null, null); + expectedNext2.setAttribute(attribName4); + expectedNext2.setOperator(operator4); + expectedNext2.setOperant(operant4); + expectedNext1.setNext(expectedNext2); + expectedNext1.setNextAnd(false); + + String q = attribName1 + operator1 + operant1 + "|" + attribName2 + operator2 + operant2 + ";" + attribName3 + + operator3 + operant3 + "|" + attribName4 + operator4 + operant4; + try { + QueryTerm actual = parser.parseQuery(q, null); + assertEquals(expected, actual); + } catch (BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testBracketsQuery() { + + String attribName2 = "test2"; + String operator2 = ">="; + String operant2 = "12345"; + + String attribName3 = "test3"; + String operator3 = "!="; + String operant3 = "\"testst123ring\""; + + QueryTerm expected = new QueryTerm(null, null); + + QueryTerm expectedFirstChild = new QueryTerm(null, null); + expectedFirstChild.setAttribute(attribName2); + expectedFirstChild.setOperator(operator2); + expectedFirstChild.setOperant(operant2); + expected.setFirstChild(expectedFirstChild); + + QueryTerm expectedFirstChildNext = new QueryTerm(null, null); + expectedFirstChildNext.setAttribute(attribName3); + expectedFirstChildNext.setOperator(operator3); + expectedFirstChildNext.setOperant(operant3); + expectedFirstChild.setNext(expectedFirstChildNext); + expectedFirstChild.setNextAnd(true); + String q = "(" + attribName2 + operator2 + operant2 + ";" + attribName3 + operator3 + operant3 + ")"; + try { + QueryTerm actual = parser.parseQuery(q, null); + assertEquals(expected, actual); + } catch (BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testMultiBracketsQuery() { + + String attribName1 = "test1"; + String operator1 = "=="; + String operant1 = "\"teststring\""; + + String attribName2 = "test2"; + String operator2 = ">="; + String operant2 = "12345"; + + String attribName3 = "test3"; + String operator3 = "!="; + String operant3 = "\"testst123ring\""; + + String attribName4 = "test4"; + String operator4 = "<="; + String operant4 = "12345"; + + String q = "(" + attribName1 + operator1 + operant1 + ";(" + attribName2 + operator2 + operant2 + "|" + + attribName3 + operator3 + operant3 + 
"))|" + attribName4 + operator4 + operant4; + QueryTerm expected = new QueryTerm(null, null); + QueryTerm expected1 = new QueryTerm(null, null); + expected1.setAttribute(attribName1); + expected1.setOperator(operator1); + expected1.setOperant(operant1); + expected1.setNextAnd(true); + expected.setFirstChild(expected1); + QueryTerm expected2 = new QueryTerm(null, null); + expected1.setNext(expected2); + + QueryTerm expected3 = new QueryTerm(null, null); + expected3.setAttribute(attribName2); + expected3.setOperator(operator2); + expected3.setOperant(operant2); + expected3.setNextAnd(false); + expected2.setFirstChild(expected3); + QueryTerm expected4 = new QueryTerm(null, null); + expected4.setAttribute(attribName3); + expected4.setOperator(operator3); + expected4.setOperant(operant3); + expected3.setNext(expected4); + + QueryTerm expected5 = new QueryTerm(null, null); + expected5.setAttribute(attribName4); + expected5.setOperator(operator4); + expected5.setOperant(operant4); + expected.setNextAnd(false); + expected.setNext(expected5); + try { + QueryTerm actual = parser.parseQuery(q, null); + + assertEquals(expected, actual); + } catch (BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + + } + +} diff --git a/scorpio-broker/Commons/src/test/java/eu/neclab/ngsildbroker/commons/ngsiqueries/test/QueryTest.java b/scorpio-broker/Commons/src/test/java/eu/neclab/ngsildbroker/commons/ngsiqueries/test/QueryTest.java new file mode 100644 index 0000000000000000000000000000000000000000..0ffd95e73bba0a3f409c4faac4b10856bb18e0c2 --- /dev/null +++ b/scorpio-broker/Commons/src/test/java/eu/neclab/ngsildbroker/commons/ngsiqueries/test/QueryTest.java @@ -0,0 +1,643 @@ +package eu.neclab.ngsildbroker.commons.ngsiqueries.test; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.BaseProperty; +import eu.neclab.ngsildbroker.commons.datatypes.Property; +import eu.neclab.ngsildbroker.commons.datatypes.PropertyEntry; +import eu.neclab.ngsildbroker.commons.datatypes.QueryTerm; +import eu.neclab.ngsildbroker.commons.exceptions.BadRequestException; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; + +/** + * URISyntaxExceptions are thrown intentionally since they will never happen in + * this fully controlled test setup with hand defined URIs + * + * @author hebgen + * + */ +public class QueryTest { + + private static QueryParser parser; + private static ParamsResolver paramResolver; + + @BeforeClass + public static void setup() { + parser = new QueryParser(); + paramResolver = new ParamsResolver() { + @Override + public String expandAttribute(String attribute, List linkHeaders) throws ResponseException { + switch (attribute) { + case NGSIConstants.QUERY_PARAMETER_CREATED_AT: + return NGSIConstants.NGSI_LD_CREATED_AT; + case NGSIConstants.QUERY_PARAMETER_MODIFIED_AT: + return NGSIConstants.NGSI_LD_MODIFIED_AT; + case NGSIConstants.QUERY_PARAMETER_OBSERVED_AT: + return NGSIConstants.NGSI_LD_OBSERVED_AT; + default: + 
return "http://mytestprop.org/" + attribute; + } + + } + + }; + } + + @Test + public void testObservedModifiedCreated() throws URISyntaxException { + String dateTime = "2019-04-26T12:23:55Z"; + Property stringProp = new Property(); + + stringProp.setId(new URI("http://mytestprop.org/teststring")); + PropertyEntry propEntry = new PropertyEntry("test", "test"); + stringProp.setSingleEntry(propEntry); + try { + propEntry.setCreatedAt(SerializationTools.date2Long(dateTime)); + propEntry.setObservedAt(SerializationTools.date2Long(dateTime)); + propEntry.setModifiedAt(SerializationTools.date2Long(dateTime)); + } catch (Exception e1) { + throw new AssertionError(); + } + String qString = "teststring.observedAt==" + dateTime; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + qString = "teststring.modifiedAt==" + dateTime; + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + qString = "teststring.createdAt==" + dateTime; + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + + } + + @Test + public void testEquals() throws URISyntaxException { + + Property stringProp = new Property(); + try { + stringProp.setId(new URI("http://mytestprop.org/teststring")); + } catch (URISyntaxException e) { + // left Empty intentionally + } + stringProp.setSingleEntry(new PropertyEntry("test", "test")); + + String qString = "teststring==\"test\""; + String qInt = "testint==4"; + String qDouble = "testdouble==123.456"; + + String qNumberRange = "testint==1..6"; + String qStringRange = "teststring==\"a\"..\"e\""; + + Property simpleStringProp = new Property(); + simpleStringProp.setId(new URI("http://mytestprop.org/teststring")); + simpleStringProp.setSingleEntry(new PropertyEntry("test", "d")); + + Property intProp = new Property(); + intProp.setId(new URI("http://mytestprop.org/testint")); + intProp.setSingleEntry(new PropertyEntry("test", 4)); + + Property doubleProp = new Property(); + doubleProp.setId(new URI("http://mytestprop.org/testdouble")); + doubleProp.setSingleEntry(new PropertyEntry("test", 123.456)); + + Property stringListProp = new Property(); + stringListProp.setId(new URI("http://mytestprop.org/teststring")); + ArrayList stringList = new ArrayList(); + stringList.add("something"); + stringList.add("todo"); + stringList.add("test");// success + stringListProp.setSingleEntry(new PropertyEntry("test", stringList)); + + Property intListProp = new Property(); + intListProp.setId(new URI("http://mytestprop.org/testint")); + + ArrayList intList = new ArrayList(); + intList.add(23); + intList.add(42); + intList.add(123);// success + intListProp.setSingleEntry(new PropertyEntry("test", intList)); + + Property doubleListProp = new Property(); + doubleListProp.setId(new URI("http://mytestprop.org/testdouble")); + ArrayList doubleList = new ArrayList(); + doubleList.add(23.45); + doubleList.add(42.33); + doubleList.add(123.456);// success + doubleListProp.setSingleEntry(new PropertyEntry("test", doubleList)); + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intProp)); + + term = 
parser.parseQuery(qDouble, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(doubleProp)); + + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + + term = parser.parseQuery(qNumberRange, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intProp)); + + term = parser.parseQuery(qStringRange, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(simpleStringProp)); + + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringListProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(intListProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testUnequals() throws URISyntaxException { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test123")); + + String qString = "teststring!=\"test\""; + String qInt = "testint!=4"; + String qDouble = "testdouble!=123.456"; + + String qNumberRange = "testint!=1..6"; + String qStringRange = "teststring!=\"a\"..\"e\""; + + Property simpleStringProp = new Property(); + simpleStringProp.setId(new URI("http://mytestprop.org/teststring")); + simpleStringProp.setSingleEntry(new PropertyEntry("test", "f")); + + Property intProp = new Property(); + intProp.setId(new URI("http://mytestprop.org/testint")); + intProp.setSingleEntry(new PropertyEntry("test", 7)); + + Property doubleProp = new Property(); + doubleProp.setId(new URI("http://mytestprop.org/testdouble")); + doubleProp.setSingleEntry(new PropertyEntry("test", 143.456)); + + Property stringListFalseProp = new Property(); + stringListFalseProp.setId(new URI("http://mytestprop.org/teststring")); + ArrayList stringListFalse = new ArrayList(); + stringListFalse.add("something"); + stringListFalse.add("todo"); + stringListFalse.add("test");// success + stringListFalseProp.setSingleEntry(new PropertyEntry("test", stringListFalse)); + + Property intListFalseProp = new Property(); + intListFalseProp.setId(new URI("http://mytestprop.org/testint")); + + ArrayList intListFalse = new ArrayList(); + intListFalse.add(23); + intListFalse.add(42); + intListFalse.add(123);// success + intListFalseProp.setSingleEntry(new PropertyEntry("test", intListFalse)); + + // Property doubleListFalseProp = new Property(); + // doubleListFalseProp.setId(new URI("http://mytestprop.org/testdouble")); + // ArrayList doubleListFalse = new ArrayList(); + // doubleListFalse.add(23.45); + // doubleListFalse.add(42.33); + // doubleListFalse.add(123.456);//success + // doubleListFalseProp.setValue(intListFalse); + Property stringListProp = new Property(); + stringListProp.setId(new URI("http://mytestprop.org/teststring")); + ArrayList stringList = new ArrayList(); + stringList.add("something"); + stringList.add("todo"); + // stringList.add("test");//success + stringListProp.setSingleEntry(new PropertyEntry("test", stringList)); + + Property intListProp = new Property(); + intListProp.setId(new URI("http://mytestprop.org/testint")); + + ArrayList intList = new ArrayList(); + intList.add(23); + intList.add(42); + // intList.add(123);//success + intListProp.setSingleEntry(new PropertyEntry("test", intList)); + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + 
assertTrue(term.calculate(stringProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intProp)); + + term = parser.parseQuery(qDouble, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(doubleProp)); + + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + + term = parser.parseQuery(qNumberRange, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intProp)); + + term = parser.parseQuery(qStringRange, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(simpleStringProp)); + + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(stringListFalseProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intListFalseProp)); + + term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringListProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intListProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testSmallerEquals() throws URISyntaxException { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test")); + + Property stringSProp = new Property(); + stringSProp.setId(new URI("http://mytestprop.org/teststring")); + stringSProp.setSingleEntry(new PropertyEntry("test", "tes")); + + Property stringBProp = new Property(); + stringBProp.setId(new URI("http://mytestprop.org/teststring")); + stringBProp.setSingleEntry(new PropertyEntry("test", "test123")); + + Property intProp = new Property(); + intProp.setId(new URI("http://mytestprop.org/testint")); + intProp.setSingleEntry(new PropertyEntry("test", 4)); + + Property intSProp = new Property(); + intSProp.setId(new URI("http://mytestprop.org/testint")); + intSProp.setSingleEntry(new PropertyEntry("test", 3)); + + Property intBProp = new Property(); + intBProp.setId(new URI("http://mytestprop.org/testint")); + intBProp.setSingleEntry(new PropertyEntry("test", 5)); + String qString = "teststring<=\"test\""; + String qInt = "testint<=4"; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + assertTrue(term.calculate(stringSProp)); + assertFalse(term.calculate(stringBProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intProp)); + assertTrue(term.calculate(intSProp)); + assertFalse(term.calculate(intBProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + + } + + @Test + public void testBiggerEquals() throws URISyntaxException { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test")); + + Property stringSProp = new Property(); + stringSProp.setId(new URI("http://mytestprop.org/teststring")); + stringSProp.setSingleEntry(new PropertyEntry("test", "tes")); + + Property stringBProp = new Property(); + stringBProp.setId(new URI("http://mytestprop.org/teststring")); + stringBProp.setSingleEntry(new PropertyEntry("test", "test123")); + + Property 
intProp = new Property(); + intProp.setId(new URI("http://mytestprop.org/testint")); + intProp.setSingleEntry(new PropertyEntry("test", 4)); + + Property intSProp = new Property(); + intSProp.setId(new URI("http://mytestprop.org/testint")); + intSProp.setSingleEntry(new PropertyEntry("test", 3)); + + Property intBProp = new Property(); + intBProp.setId(new URI("http://mytestprop.org/testint")); + intBProp.setSingleEntry(new PropertyEntry("test", 5)); + String qString = "teststring>=\"test\""; + String qInt = "testint>=4"; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + assertFalse(term.calculate(stringSProp)); + assertTrue(term.calculate(stringBProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(intProp)); + assertFalse(term.calculate(intSProp)); + assertTrue(term.calculate(intBProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testSmaller() throws URISyntaxException { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test")); + + Property stringSProp = new Property(); + stringSProp.setId(new URI("http://mytestprop.org/teststring")); + stringSProp.setSingleEntry(new PropertyEntry("test", "tes")); + + Property stringBProp = new Property(); + stringBProp.setId(new URI("http://mytestprop.org/teststring")); + stringBProp.setSingleEntry(new PropertyEntry("test", "test123")); + + Property intProp = new Property(); + intProp.setId(new URI("http://mytestprop.org/testint")); + intProp.setSingleEntry(new PropertyEntry("test", 4)); + + Property intSProp = new Property(); + intSProp.setId(new URI("http://mytestprop.org/testint")); + intSProp.setSingleEntry(new PropertyEntry("test", 3)); + + Property intBProp = new Property(); + intBProp.setId(new URI("http://mytestprop.org/testint")); + intBProp.setSingleEntry(new PropertyEntry("test", 5)); + String qString = "teststring<\"test\""; + String qInt = "testint<4"; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(stringProp)); + assertTrue(term.calculate(stringSProp)); + assertFalse(term.calculate(stringBProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(intProp)); + assertTrue(term.calculate(intSProp)); + assertFalse(term.calculate(intBProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testBigger() throws URISyntaxException { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test")); + + Property stringSProp = new Property(); + stringSProp.setId(new URI("http://mytestprop.org/teststring")); + stringSProp.setSingleEntry(new PropertyEntry("test", "tes")); + + Property stringBProp = new Property(); + stringBProp.setId(new URI("http://mytestprop.org/teststring")); + stringBProp.setSingleEntry(new PropertyEntry("test", "test123")); + + Property intProp = new Property(); + intProp.setId(new URI("http://mytestprop.org/testint")); + intProp.setSingleEntry(new PropertyEntry("test", 4)); + + Property intSProp = new Property(); + intSProp.setId(new URI("http://mytestprop.org/testint")); + intSProp.setSingleEntry(new 
PropertyEntry("test", 3)); + + Property intBProp = new Property(); + intBProp.setId(new URI("http://mytestprop.org/testint")); + intBProp.setSingleEntry(new PropertyEntry("test", 5)); + String qString = "teststring>\"test\""; + String qInt = "testint>4"; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(stringProp)); + assertFalse(term.calculate(stringSProp)); + assertTrue(term.calculate(stringBProp)); + + term = parser.parseQuery(qInt, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(intProp)); + assertFalse(term.calculate(intSProp)); + assertTrue(term.calculate(intBProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testPattern() throws URISyntaxException { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test32")); + + Property stringNoMatchProp = new Property(); + stringNoMatchProp.setId(new URI("http://mytestprop.org/teststring")); + stringNoMatchProp.setSingleEntry(new PropertyEntry("test", "test")); + + String qString = "teststring~=\\w+32"; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(stringProp)); + assertFalse(term.calculate(stringNoMatchProp)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testNotPattern() throws Exception { + Property stringProp = new Property(); + stringProp.setId(new URI("http://mytestprop.org/teststring")); + stringProp.setSingleEntry(new PropertyEntry("test", "test32")); + + Property stringNoMatchProp = new Property(); + stringNoMatchProp.setId(new URI("http://mytestprop.org/teststring")); + stringNoMatchProp.setSingleEntry(new PropertyEntry("test", "test")); + + String qString = "teststring!~=\\w+32"; + try { + QueryTerm term = parser.parseQuery(qString, null); + term.setParamsResolver(paramResolver); + assertFalse(term.calculate(stringProp)); + assertTrue(term.calculate(stringNoMatchProp)); + } catch ( + + BadRequestException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testCompoundValueQuery() throws URISyntaxException { + String q = "testattrib[level1]==1111"; + + Property testProp = new Property(); + testProp.setId(new URI("http://mytestprop.org/testattrib")); + HashMap> value = new HashMap>(); + ArrayList valueList = new ArrayList(); + valueList.add(1111); + value.put("http://mytestprop.org/level1", valueList); + testProp.setSingleEntry(new PropertyEntry("test", value)); + try { + QueryTerm term = parser.parseQuery(q, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(testProp)); + + q = "testattrib[level1][level2]==2222"; + testProp = new Property(); + testProp.setId(new URI("http://mytestprop.org/testattrib")); + HashMap> value2 = new HashMap>(); + ArrayList valueList2 = new ArrayList(); + valueList2.add(2222); + value2.put("http://mytestprop.org/level2", valueList2); + value = new HashMap>(); + valueList = new ArrayList(); + valueList.add(value2); + value.put("http://mytestprop.org/level1", valueList); + testProp.setSingleEntry(new PropertyEntry("test", value)); + term = parser.parseQuery(q, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(testProp)); +//not a possible query at the moment + /* + * q = "testattrib.subattrib[level1][level2]==3333"; 
Property topProp = new + * Property(); topProp.setId(new URI("http://mytestprop.org/testattrib")); + * + * testProp = new Property(); testProp.setId(new + * URI("http://mytestprop.org/subattrib")); value2 = new HashMap>(); valueList2 = new ArrayList(); valueList2.add(3333); + * value2.put("http://mytestprop.org/level2", valueList2); value = new + * HashMap>(); valueList = new ArrayList(); + * valueList.add(value2); value.put("http://mytestprop.org/level1", valueList); + * PropertyEntry entry = new PropertyEntry("test", value); + * testProp.setSingleEntry(entry); + * + * List properties = new ArrayList(); + * properties.add(testProp); entry.setProperties(properties); term = + * parser.parseQuery(q, null); term.setParamsResolver(paramResolver); + * assertTrue(term.calculate(topProp)); + */ + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } + + @Test + public void testMultiQuery() throws URISyntaxException { + String q = "(test1==\"teststring\";(test2>=12345|test3!=\"testst123ring\"))|test4<=12345"; + Property test1 = new Property(); + test1.setId(new URI("http://mytestprop.org/test1")); + test1.setSingleEntry(new PropertyEntry("test", "teststring")); + Property test1Not = new Property(); + test1Not.setId(new URI("http://mytestprop.org/test1")); + test1Not.setSingleEntry(new PropertyEntry("test", "teststringasdasdas")); + + Property test2 = new Property(); + test2.setId(new URI("http://mytestprop.org/test2")); + test2.setSingleEntry(new PropertyEntry("test", 12345)); + Property test2Not = new Property(); + test2Not.setId(new URI("http://mytestprop.org/test2")); + test2Not.setSingleEntry(new PropertyEntry("test", 1234)); + + Property test3 = new Property(); + test3.setId(new URI("http://mytestprop.org/test3")); + test3.setSingleEntry(new PropertyEntry("test", "teststring")); + Property test3Not = new Property(); + test3Not.setId(new URI("http://mytestprop.org/test3")); + test3Not.setSingleEntry(new PropertyEntry("test", "testst123ring")); + + Property test4 = new Property(); + test4.setId(new URI("http://mytestprop.org/test4")); + test4.setSingleEntry(new PropertyEntry("test", 12345)); + Property test4Not = new Property(); + test4Not.setId(new URI("http://mytestprop.org/test4")); + test4Not.setSingleEntry(new PropertyEntry("test", 123456)); + + ArrayList allValid = new ArrayList(); + allValid.add(test1); + allValid.add(test2); + allValid.add(test3); + allValid.add(test4); + + ArrayList orStillValid = new ArrayList(); + orStillValid.add(test1); + orStillValid.add(test2); + orStillValid.add(test3Not); + orStillValid.add(test4Not); + + ArrayList orStillValid2 = new ArrayList(); + orStillValid2.add(test1Not); + orStillValid2.add(test2Not); + orStillValid2.add(test3Not); + orStillValid2.add(test4); + + ArrayList notValid1 = new ArrayList(); + notValid1.add(test1Not); + notValid1.add(test2); + notValid1.add(test3Not); + notValid1.add(test4Not); + + ArrayList notValid2 = new ArrayList(); + notValid2.add(test1); + notValid2.add(test2Not); + notValid2.add(test3Not); + notValid2.add(test4Not); + try { + QueryTerm term = parser.parseQuery(q, null); + term.setParamsResolver(paramResolver); + assertTrue(term.calculate(allValid)); + assertTrue(term.calculate(orStillValid)); + assertTrue(term.calculate(orStillValid2)); + assertFalse(term.calculate(notValid1)); + assertFalse(term.calculate(notValid2)); + } catch (ResponseException e) { + Assert.fail(e.getLocalizedMessage()); + } + } +} diff --git a/scorpio-broker/Core/.gitignore b/scorpio-broker/Core/.gitignore new file mode 
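[Editorial note, not part of the diff] QueryTest above exercises the NGSI-LD q filter language: ==, !=, <, <=, >, >=, ~= pattern matching, ranges such as 1..6, compound paths like testattrib[level1], and boolean composition where ";" acts as AND, "|" as OR and parentheses group terms, as testMultiQuery shows. The evaluation pattern used throughout the tests boils down to: parse the expression into a QueryTerm, attach a ParamsResolver so short attribute names are expanded to full URIs, and call calculate() against a Property. A minimal sketch distilled from those tests, reusing the Commons classes from this diff and the tests' dummy http://mytestprop.org namespace (illustrative only):

import java.net.URI;
import eu.neclab.ngsildbroker.commons.datatypes.Property;
import eu.neclab.ngsildbroker.commons.datatypes.PropertyEntry;
import eu.neclab.ngsildbroker.commons.datatypes.QueryTerm;
import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver;
import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser;

class QueryUsageSketch {
    // Evaluates an NGSI-LD q expression against a single Property, as the tests above do.
    static boolean matches(ParamsResolver resolver) throws Exception {
        Property prop = new Property();
        prop.setId(new URI("http://mytestprop.org/testint"));
        prop.setSingleEntry(new PropertyEntry("test", 4));

        QueryTerm term = new QueryParser().parseQuery("testint>=4", null);
        term.setParamsResolver(resolver); // expands "testint" to its full URI before comparison
        return term.calculate(prop);      // true here: 4 >= 4
    }
}

In the tests these calls are wrapped in try/catch blocks because parseQuery rejects malformed expressions with a BadRequestException. End of editorial note; the diff continues below.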
100644 index 0000000000000000000000000000000000000000..6d5af104f79c458a07754f033da7c8ae8bcfa483 --- /dev/null +++ b/scorpio-broker/Core/.gitignore @@ -0,0 +1,3 @@ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/Core/AtContextServer/.gitignore b/scorpio-broker/Core/AtContextServer/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..909c51ad8a826a2e4b90278784d4820bdb341b39 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/.gitignore @@ -0,0 +1,90 @@ +/target/ +/logs/ + +.metadata +bin/ +tmp/ +*.tmp +*.bak +*.swp +*~.nib +local.properties +.settings/ +.loadpath +.recommenders + +# External tool builders +.externalToolBuilders/ + +# Locally stored "Eclipse launch configurations" +*.launch + +# PyDev specific (Python IDE for Eclipse) +*.pydevproject + +# CDT-specific (C/C++ Development Tooling) +.cproject + +# CDT- autotools +.autotools + +# Java annotation processor (APT) +.factorypath + +# PDT-specific (PHP Development Tools) +.buildpath + +# sbteclipse plugin +.target + +# Tern plugin +.tern-project + +# TeXlipse plugin +.texlipse + +# STS (Spring Tool Suite) +.springBeans + +# Code Recommenders +.recommenders/ + +# Annotation Processing +.apt_generated/ + +# Scala IDE specific (Scala & Java development for Eclipse) +.cache-main +.scala_dependencies +.worksheet + +### Eclipse Patch ### +# Eclipse Core +.project + +# JDT-specific (Eclipse Java Development Tools) +.classpath + +# Annotation Processing +.apt_generated + +### Java ### +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar \ No newline at end of file diff --git a/scorpio-broker/Core/AtContextServer/dockerfile4maven b/scorpio-broker/Core/AtContextServer/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..14f9b75af1b5e921d420fda3043b5ba4e99827c8 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/dockerfile4maven @@ -0,0 +1,12 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/Core/AtContextServer/pom.xml b/scorpio-broker/Core/AtContextServer/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..32d20cecaa683c6d20eee232dff127b69da2c916 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/pom.xml @@ -0,0 +1,59 @@ + + 4.0.0 + AtContextServer + jar + 1.0.0-SNAPSHOT + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-logging + + + + + + diff --git a/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/AtContextServer.java b/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/AtContextServer.java new file mode 100644 index 0000000000000000000000000000000000000000..0b69883cfab4cfce2cde9639a1c683a479291eb1 --- /dev/null 
+++ b/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/AtContextServer.java @@ -0,0 +1,53 @@ +package eu.neclab.ngsildbroker.atcontextserver; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.stream.annotation.EnableBinding; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; +import org.springframework.web.client.RestTemplate; + +import eu.neclab.ngsildbroker.commons.ldcontext.AtContext; +import eu.neclab.ngsildbroker.commons.ldcontext.AtContextProducerChannel; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; + + +@SpringBootApplication +@Import({KafkaConfig.class, SwaggerConfigDetails.class}) +@EnableBinding({AtContextProducerChannel.class}) +public class AtContextServer {// implements QueryHandlerInterface{ + + public static void main(String[] args) { + SpringApplication.run(AtContextServer.class, args); + } + + @Bean + KafkaOps ops() { + return new KafkaOps(); + } + + @Bean + AtContext atCon() { + return new AtContext(); + } + + @Bean + RestTemplate restTemp() { + return new RestTemplate(); + } + + @Bean + SecurityConfig securityConfig() { + return new SecurityConfig(); + } + + @Bean + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } +} diff --git a/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/config/AtContextServerResourceConfigurer.java b/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/config/AtContextServerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..5790078229393296efb2a757747308260a81d432 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/config/AtContextServerResourceConfigurer.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.atcontextserver.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. 
+ */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class AtContextServerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/controller/AtContextServerController.java b/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/controller/AtContextServerController.java new file mode 100644 index 0000000000000000000000000000000000000000..d8a1294a42fcb37a95dffa3fbc79940227f3861f --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/java/eu/neclab/ngsildbroker/atcontextserver/controller/AtContextServerController.java @@ -0,0 +1,88 @@ +package eu.neclab.ngsildbroker.atcontextserver.controller; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.ResourceLoader; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import com.google.common.io.Files; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.ldcontext.AtContext; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; + +@RestController +@RequestMapping("ngsi-ld/contextes") +public class AtContextServerController { + private final static Logger logger = LogManager.getLogger(AtContextServerController.class); + + @Autowired + AtContext atContext; + + + @Autowired + ResourceLoader resourceLoader; + + /* + * String coreContext; + * + * @PostConstruct private void setup() { try { coreContext = new + * String(Files.asByteSource(resourceLoader.getResource( + * "classpath:ngsi-ld-core-context.jsonld").getFile()).read()); } catch + * (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } + * + * } + */ + + /** + * Method(GET) for multiple attributes separated by comma list + * + * @param request + * @param entityId + * @param attrs + * @return + */ + @GetMapping(path = "/{contextId}") + public ResponseEntity getContextForEntity(HttpServletRequest request, + @PathVariable("contextId") String contextId) { + logger.trace("getAtContext() for " + contextId); + /* + * if(contextId.equals(AppConstants.CORE_CONTEXT_URL_SUFFIX)) { return + * ResponseEntity.accepted().contentType(MediaType.APPLICATION_JSON).body( + * coreContext); } + */ + List contextes = atContext.getContextes(contextId); + StringBuilder body = new StringBuilder("{\"@context\": "); + + body.append(DataSerializer.toJson(contextes)); + body.append("}"); + return ResponseEntity.accepted().contentType(MediaType.APPLICATION_JSON).body(body.toString()); + } + + @GetMapping(name="atcontextget") + public ResponseEntity getAllContextes() 
{ + StringBuilder body = new StringBuilder("{\n"); + //Manuallly done because gson shows the actual byte values and not a string + Map contextMapping = atContext.getAllContextes(); + for(Entry contextEntry: contextMapping.entrySet()) { + body.append(" \"" + contextEntry.getKey() + "\": \"" + new String(contextEntry.getValue()) + "\",\n"); + } + body.append("}"); + return ResponseEntity.accepted().contentType(MediaType.APPLICATION_JSON).body(body.toString()); + } + +} diff --git a/scorpio-broker/Core/AtContextServer/src/main/resources/application-aaio.yml b/scorpio-broker/Core/AtContextServer/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..1004024b4ff01b4211925f0a1beaf660aa836cd9 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/resources/application-aaio.yml @@ -0,0 +1,17 @@ +server: + port: 27015 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + +bootstrap: + servers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/AtContextServer/src/main/resources/application-aio.yml b/scorpio-broker/Core/AtContextServer/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..d845a75d2eecaa65623df2f76b5c95a0c1f73764 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/resources/application-aio.yml @@ -0,0 +1,17 @@ +server: + port: 27015 + +spring: + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + +bootstrap: + servers: localhost:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/AtContextServer/src/main/resources/application-dist.yml b/scorpio-broker/Core/AtContextServer/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..b54d188c4bd4b7f410b5b350468f05cbb25ef659 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/resources/application-dist.yml @@ -0,0 +1,17 @@ +server: + port: 27015 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + +bootstrap: + servers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/AtContextServer/src/main/resources/application.yml b/scorpio-broker/Core/AtContextServer/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..d9af584209f205f6d815125fc5db3742537e8b1c --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/resources/application.yml @@ -0,0 +1,55 @@ +spring: + application: + name: atcontext-server + main: + lazy-initialization: true + allow-bean-definition-overriding: true + kafka: + admin: + properties: + cleanup: + policy: compact + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + +server: + port: 27015 + tomcat: + max: + threads: 20 +#Entity-Manager properties +entity: + topic: ENTITY +query: + topic: QUERY + result: + topic: QUERY_RESULT +csource: + query: + topic: CONTEXT_REGISTRY_QUERY + +bootstrap: + servers: localhost:9092 +#enable log compaction + + +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + + + + + \ No newline at end of file diff --git 
a/scorpio-broker/Core/AtContextServer/src/main/resources/log4j2-spring.xml b/scorpio-broker/Core/AtContextServer/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..5c5ba04ba96e8a180228217253fdfb248a16302f --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/AtContextServer/src/main/resources/ngsi-ld-core-context.jsonld b/scorpio-broker/Core/AtContextServer/src/main/resources/ngsi-ld-core-context.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..fb43242d5a69cf2b5c0e7f22a45e85c1750f1368 --- /dev/null +++ b/scorpio-broker/Core/AtContextServer/src/main/resources/ngsi-ld-core-context.jsonld @@ -0,0 +1,158 @@ +{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "id": "@id", + "type": "@type", + "value": "https://uri.etsi.org/ngsi-ld/hasValue", + "object": { + "@id": "https://uri.etsi.org/ngsi-ld/hasObject", + "@type":"@id" + }, + "Property": "https://uri.etsi.org/ngsi-ld/Property", + "Relationship": "https://uri.etsi.org/ngsi-ld/Relationship", + "DateTime": "https://uri.etsi.org/ngsi-ld/DateTime", + "Date": "https://uri.etsi.org/ngsi-ld/Date", + "Time": "https://uri.etsi.org/ngsi-ld/Time", + "createdAt": { + "@id": "https://uri.etsi.org/ngsi-ld/createdAt", + "@type": "DateTime" + }, + "modifiedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/modifiedAt", + "@type": "DateTime" + }, + "observedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/observedAt", + "@type": "DateTime" + }, + "datasetId": { + "@id": "https://uri.etsi.org/ngsi-ld/datasetId", + "@type": "@id" + }, + "instanceId": { + "@id": "https://uri.etsi.org/ngsi-ld/instanceId", + "@type": "@id" + }, + "unitCode": "https://uri.etsi.org/ngsi-ld/unitCode", + "location": "https://uri.etsi.org/ngsi-ld/location", + "observationSpace": "https://uri.etsi.org/ngsi-ld/observationSpace", + "operationSpace": "https://uri.etsi.org/ngsi-ld/operationSpace", + "GeoProperty": "https://uri.etsi.org/ngsi-ld/GeoProperty", + "TemporalProperty": "https://uri.etsi.org/ngsi-ld/TemporalProperty", + "ContextSourceRegistration": "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration", + "Subscription": "https://uri.etsi.org/ngsi-ld/Subscription", + "Notification": "https://uri.etsi.org/ngsi-ld/Notification", + "ContextSourceNotification": "https://uri.etsi.org/ngsi-ld/ContextSourceNotification", + "title": "https://uri.etsi.org/ngsi-ld/title", + "detail": "https://uri.etsi.org/ngsi-ld/detail", + "idPattern": "https://uri.etsi.org/ngsi-ld/idPattern", + "name": "https://uri.etsi.org/ngsi-ld/name", + "description": "https://uri.etsi.org/ngsi-ld/description", + "information": "https://uri.etsi.org/ngsi-ld/information", + "observationInterval": "https://uri.etsi.org/ngsi-ld/observationInterval", + "managementInterval": "https://uri.etsi.org/ngsi-ld/managementInterval", + "expires": { + "@id": "https://uri.etsi.org/ngsi-ld/expires", + "@type": "DateTime" + }, + "endpoint": "https://uri.etsi.org/ngsi-ld/endpoint", + "entities": "https://uri.etsi.org/ngsi-ld/entities", + "properties": { + "@id": "https://uri.etsi.org/ngsi-ld/properties", + "@type": "@vocab" + }, + "relationships": { + "@id": "https://uri.etsi.org/ngsi-ld/relationships", + "@type": "@vocab" + }, + "start": { + "@id": "https://uri.etsi.org/ngsi-ld/start", + "@type": "DateTime" + }, + "end": { + "@id": 
"https://uri.etsi.org/ngsi-ld/end", + "@type": "DateTime" + }, + "watchedAttributes":{ + "@id": "https://uri.etsi.org/ngsi-ld/watchedAttributes", + "@type": "@vocab" + }, + "timeInterval": "https://uri.etsi.org/ngsi-ld/timeInterval", + "q": "https://uri.etsi.org/ngsi-ld/q", + "geoQ": "https://uri.etsi.org/ngsi-ld/geoQ", + "csf": "https://uri.etsi.org/ngsi-ld/csf", + "isActive": "https://uri.etsi.org/ngsi-ld/isActive", + "notification": "https://uri.etsi.org/ngsi-ld/notification", + "status": "https://uri.etsi.org/ngsi-ld/status", + "throttling": "https://uri.etsi.org/ngsi-ld/throttling", + "temporalQ": "https://uri.etsi.org/ngsi-ld/temporalQ", + "geometry": "https://uri.etsi.org/ngsi-ld/geometry", + "coordinates": "https://uri.etsi.org/ngsi-ld/coordinates", + "georel": "https://uri.etsi.org/ngsi-ld/georel", + "geoproperty": "https://uri.etsi.org/ngsi-ld/geoproperty", + "attributes": { + "@id": "https://uri.etsi.org/ngsi-ld/attributes", + "@type": "@vocab" + }, + "format": "https://uri.etsi.org/ngsi-ld/format", + "timesSent": "https://uri.etsi.org/ngsi-ld/timesSent", + "lastNotification":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastNotification", + "@type": "DateTime" + }, + "lastFailure":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastFailure", + "@type": "DateTime" + }, + "lastSuccess":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastSuccess", + "@type": "DateTime" + }, + "uri": "https://uri.etsi.org/ngsi-ld/uri", + "accept": "https://uri.etsi.org/ngsi-ld/accept", + "success": { + "@id": "https://uri.etsi.org/ngsi-ld/success", + "@type": "@id" + }, + "errors": "https://uri.etsi.org/ngsi-ld/errors", + "error": "https://uri.etsi.org/ngsi-ld/error", + "entityId": { + "@id": "https://uri.etsi.org/ngsi-ld/entityId", + "@type": "@id" + }, + "updated": "https://uri.etsi.org/ngsi-ld/updated", + "unchanged": "https://uri.etsi.org/ngsi-ld/unchanged", + "attributeName": "https://uri.etsi.org/ngsi-ld/attributeName", + "reason": "https://uri.etsi.org/ngsi-ld/reason", + "timerel": "https://uri.etsi.org/ngsi-ld/timerel", + "time": { + "@id": "https://uri.etsi.org/ngsi-ld/time", + "@type": "DateTime" + }, + "endTime": { + "@id": "https://uri.etsi.org/ngsi-ld/endTime", + "@type": "DateTime" + }, + "timeproperty": "https://uri.etsi.org/ngsi-ld/timeproperty", + "subscriptionId": { + "@id": "https://uri.etsi.org/ngsi-ld/subscriptionId", + "@type": "@id" + }, + "notifiedAt":{ + "@id": "https://uri.etsi.org/ngsi-ld/notifiedAt", + "@type": "DateTime" + }, + "data": "https://uri.etsi.org/ngsi-ld/data", + "triggerReason": "https://uri.etsi.org/ngsi-ld/triggerReason", + "values":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasValues", + "@container": "@list" + }, + "objects":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasObjects", + "@type": "@id", + "@container": "@list" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} diff --git a/scorpio-broker/Core/EntityManager/.gitignore b/scorpio-broker/Core/EntityManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..909c51ad8a826a2e4b90278784d4820bdb341b39 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/.gitignore @@ -0,0 +1,90 @@ +/target/ +/logs/ + +.metadata +bin/ +tmp/ +*.tmp +*.bak +*.swp +*~.nib +local.properties +.settings/ +.loadpath +.recommenders + +# External tool builders +.externalToolBuilders/ + +# Locally stored "Eclipse launch configurations" +*.launch + +# PyDev specific (Python IDE for Eclipse) +*.pydevproject + +# CDT-specific (C/C++ Development Tooling) +.cproject + +# CDT- autotools +.autotools + +# Java 
annotation processor (APT) +.factorypath + +# PDT-specific (PHP Development Tools) +.buildpath + +# sbteclipse plugin +.target + +# Tern plugin +.tern-project + +# TeXlipse plugin +.texlipse + +# STS (Spring Tool Suite) +.springBeans + +# Code Recommenders +.recommenders/ + +# Annotation Processing +.apt_generated/ + +# Scala IDE specific (Scala & Java development for Eclipse) +.cache-main +.scala_dependencies +.worksheet + +### Eclipse Patch ### +# Eclipse Core +.project + +# JDT-specific (Eclipse Java Development Tools) +.classpath + +# Annotation Processing +.apt_generated + +### Java ### +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/dockerfile4maven b/scorpio-broker/Core/EntityManager/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..14f9b75af1b5e921d420fda3043b5ba4e99827c8 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/dockerfile4maven @@ -0,0 +1,12 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/Core/EntityManager/pom.xml b/scorpio-broker/Core/EntityManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..d851027ac9d53e37137ad81fec03e8a5f9402be3 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/pom.xml @@ -0,0 +1,120 @@ + + 4.0.0 + EntityManager + jar + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + 1.0.0-SNAPSHOT + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.flywaydb + flyway-core + + + org.postgresql + postgresql + + + + + org.powermock + powermock-module-junit4 + 2.0.2 + test + + + org.powermock + powermock-api-mockito2 + 2.0.2 + test + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-logging + + + + + junit + junit + test + + + + + diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/EntityHandler.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/EntityHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..eb5e3eb271c41cc9fdb91a4f60b8727387bbae23 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/EntityHandler.java @@ -0,0 +1,78 @@ +package eu.neclab.ngsildbroker.entityhandler; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.stream.annotation.EnableBinding; +import org.springframework.context.annotation.Bean; +import 
org.springframework.context.annotation.Import; +import org.springframework.context.annotation.Primary; + +import eu.neclab.ngsildbroker.commons.ldcontext.AtContextProducerChannel; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.CommonKafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; +import eu.neclab.ngsildbroker.entityhandler.config.EntityProducerChannel; +import eu.neclab.ngsildbroker.entityhandler.config.EntityTopicMap; + + +//@Component(immediate=true) +@SpringBootApplication +@EnableBinding({ EntityProducerChannel.class, AtContextProducerChannel.class }) // enable channel binding with topics +@Import({CommonKafkaConfig.class, SwaggerConfigDetails.class}) +public class EntityHandler { + public static void main(String[] args) { + SpringApplication.run(EntityHandler.class, args); + } + + + + @Bean("emops") + @Primary + KafkaOps ops() { + return new KafkaOps(); + } + @Bean("emconRes") + @Primary + ContextResolverBasic conRes() { + return new ContextResolverBasic(); + } + + + + @Bean("emsec") + SecurityConfig securityConfig() { + return new SecurityConfig(); + } + + @Bean("emresconfdet") + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } + + @Bean("emparamsres") + @Primary + ParamsResolver paramsResolver() { + return new ParamsResolver(); + } + + @Bean("emqueryparser") + @Primary + QueryParser queryParser() { + return new QueryParser(); + } + + @Bean("emtopicmap") + @Primary + EntityTopicMap entityTopicMap() { + return new EntityTopicMap(); + } + + + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityManagerResourceConfigurer.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityManagerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..3281481c218de5250fe6cac95fc1338133b653a3 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityManagerResourceConfigurer.java @@ -0,0 +1,30 @@ +package eu.neclab.ngsildbroker.entityhandler.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; + +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. 
+ */ +@ConditionalOnWebApplication +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class EntityManagerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityProducerChannel.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityProducerChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..c8e2d43d981c078a92757e2c86bcdcd23afd27f8 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityProducerChannel.java @@ -0,0 +1,59 @@ +package eu.neclab.ngsildbroker.entityhandler.config; + +import org.springframework.cloud.stream.annotation.Output; +import org.springframework.messaging.MessageChannel; + +import eu.neclab.ngsildbroker.commons.stream.interfaces.IProducerChannels; + +/** + * + * @version 1.0 + * @date 10-Jul-2018 + */ + +public interface EntityProducerChannel extends IProducerChannels { + + public String entityWriteChannel = "ENTITY_WRITE_CHANNEL"; + public String kvEntityWriteChannel = "KVENTITY_WRITE_CHANNEL"; + public String entityWithoutSysAttrsWriteChannel = "ENTITY_WITHOUT_SYSATTRS_WRITE_CHANNEL"; + public String createWriteChannel = "CREATE_WRITE_CHANNEL"; + public String appendWriteChannel = "APPEND_WRITE_CHANNEL"; + public String updateWriteChannel = "UPDATE_WRITE_CHANNEL"; + public String deleteWriteChannel = "DELETE_WRITE_CHANNEL"; + public String contextRegistryWriteChannel="CONTEXT_REGISTRY_WRITE_CHANNEL"; + public String contextUpdateWriteChannel="CONTEXT_REGISTRY_UPDATE_CHANNEL"; + public String entityIndexWriteChannel="INDEX_WRITE_CHANNEL"; + + + @Output(entityWriteChannel) + MessageChannel entityWriteChannel(); + + @Output(kvEntityWriteChannel) + MessageChannel kvEntityWriteChannel(); + + @Output(entityWithoutSysAttrsWriteChannel) + MessageChannel entityWithoutSysAttrsWriteChannel(); + + @Output(createWriteChannel) + MessageChannel createWriteChannel(); + + @Output(appendWriteChannel) + MessageChannel appendWriteChannel(); + + @Output(updateWriteChannel) + MessageChannel updateWriteChannel(); + + @Output(deleteWriteChannel) + MessageChannel deleteWriteChannel(); + + @Output(contextRegistryWriteChannel) + MessageChannel contextRegistryWriteChannel(); + + @Output(contextUpdateWriteChannel) + MessageChannel contextUpdateWriteChannel(); + + @Output(entityIndexWriteChannel) + MessageChannel entityIndexWriteChannel(); + + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityTopicMap.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityTopicMap.java new file mode 100644 index 0000000000000000000000000000000000000000..9a39835705e8581f44f38db3285a58f248f17cba --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/config/EntityTopicMap.java @@ -0,0 +1,31 @@ +package eu.neclab.ngsildbroker.entityhandler.config; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +import eu.neclab.ngsildbroker.commons.datatypes.EntityDetails; + + +public 
class EntityTopicMap { + + public EntityTopicMap(){} + + private static Map topicMap = new ConcurrentHashMap(1000); + + public void put(String key, EntityDetails details) { + topicMap.put(key, details); + } + + public EntityDetails get(String key) { + return topicMap.get(key); + } + + public boolean isExist(String key) { + return topicMap.containsKey(key); + } + + public void remove(String key) { + topicMap.remove(key); + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.java new file mode 100644 index 0000000000000000000000000000000000000000..77a3d7a81c27096058a3c57b389d0b6cfbd6e529 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.java @@ -0,0 +1,115 @@ +package eu.neclab.ngsildbroker.entityhandler.controller; + +import java.io.UnsupportedEncodingException; +import java.net.MalformedURLException; + +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.BatchResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; +import eu.neclab.ngsildbroker.entityhandler.services.EntityService; + +@RestController +@RequestMapping("/ngsi-ld/v1/entityOperations") +public class EntityBatchController { + + @Autowired + EntityService entityService; + + @Autowired + @Qualifier("emconRes") + ContextResolverBasic contextResolver; + + HttpUtils httpUtils; + + @PostConstruct + private void setup() { + httpUtils = HttpUtils.getInstance(contextResolver); + } + + @PostMapping("/create") + public ResponseEntity createMultiple(HttpServletRequest request, @RequestBody String payload) + throws ResponseException { + try { + HttpUtils.doPreflightCheck(request, payload); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.BATCH_URL_ID); + BatchResult result = entityService.createMultipleMessage(resolved); + return generateBatchResultReply(result, HttpStatus.CREATED); + } catch (MalformedURLException | UnsupportedEncodingException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + } + + private ResponseEntity generateBatchResultReply(BatchResult result, HttpStatus okStatus) { + HttpStatus status = HttpStatus.MULTI_STATUS; + String body = DataSerializer.toJson(result); + if (result.getFails().isEmpty()) { + status = okStatus; + body = null; + } + if (result.getSuccess().isEmpty()) { + status = HttpStatus.BAD_REQUEST; + } + + if (body == null) { + 
return ResponseEntity.status(status).build(); + } + return httpUtils.generateReply(body, null, status, false); + } + + @PostMapping("/upsert") + public ResponseEntity upsertMultiple(HttpServletRequest request, @RequestBody String payload) + throws ResponseException { + try { + HttpUtils.doPreflightCheck(request, payload); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.BATCH_URL_ID); + BatchResult result = entityService.upsertMultipleMessage(resolved); + return generateBatchResultReply(result, HttpStatus.NO_CONTENT); + } catch (MalformedURLException | UnsupportedEncodingException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + } + + @PostMapping("/update") + public ResponseEntity updateMultiple(HttpServletRequest request, @RequestBody String payload) + throws ResponseException { + try { + HttpUtils.doPreflightCheck(request, payload); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.BATCH_URL_ID); + BatchResult result = entityService.updateMultipleMessage(resolved); + return generateBatchResultReply(result, HttpStatus.NO_CONTENT); + } catch (MalformedURLException | UnsupportedEncodingException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + } + + @PostMapping("/delete") + public ResponseEntity deleteMultiple(HttpServletRequest request, @RequestBody String payload) + throws ResponseException { + try { +// String resolved = httpUtils.expandPayload(request, payload); + // it's an array of uris which is not json-ld so no expanding here + BatchResult result = entityService.deleteMultipleMessage(payload); + return generateBatchResultReply(result, HttpStatus.NO_CONTENT); + } catch (ResponseException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.java new file mode 100644 index 0000000000000000000000000000000000000000..55482b85732ba61d2418ffc599087bca544aff6c --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.java @@ -0,0 +1,318 @@ +package eu.neclab.ngsildbroker.entityhandler.controller; + +import java.time.LocalDateTime; +import java.time.format.DateTimeParseException; +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.gson.JsonParseException; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.AppendResult; +import 
eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.datatypes.UpdateResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.entityhandler.config.EntityProducerChannel; +import eu.neclab.ngsildbroker.entityhandler.services.EntityService; +import eu.neclab.ngsildbroker.entityhandler.validationutil.Validator; + +/** + * + * @version 1.0 + * @date 10-Jul-2018 + */ +@RestController +@RequestMapping("/ngsi-ld/v1/entities") +public class EntityController {// implements EntityHandlerInterface { + + private final static Logger logger = LoggerFactory.getLogger(EntityController.class); + + @Autowired + EntityService entityService; + @Autowired + ObjectMapper objectMapper; + +// @Autowired +// @Qualifier("emops") +// KafkaOps kafkaOps; + + @Autowired + @Qualifier("emconRes") + ContextResolverBasic contextResolver; + + @Autowired + @Qualifier("emparamsres") + ParamsResolver paramsResolver; + + @SuppressWarnings("unused") + // TODO check to remove ... never used + private EntityProducerChannel producerChannel; + + @Autowired + public EntityController(EntityProducerChannel producerChannel) { + this.producerChannel = producerChannel; + } + + private HttpUtils httpUtils; + + @PostConstruct + private void setup() { + this.httpUtils = HttpUtils.getInstance(contextResolver); + } + + LocalDateTime start; + LocalDateTime end; + + public EntityController() { + } + + /** + * Method(POST) for "/ngsi-ld/v1/entities/" rest endpoint. + * + * @param payload + * jsonld message + * @return ResponseEntity object + */ + @PostMapping + public ResponseEntity createEntity(HttpServletRequest request, + @RequestBody(required = false) String payload) { + String result = null; + try { + HttpUtils.doPreflightCheck(request, payload); + logger.trace("create entity :: started"); + logger.info("Stratos :: im here"); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.ENTITIES_URL_ID); + //entityService.validateEntity(resolved, request); + + result = entityService.createMessage(resolved); + logger.trace("create entity :: completed"); + return ResponseEntity.status(HttpStatus.CREATED).header("location", AppConstants.ENTITES_URL + result) + .build(); + } catch (ResponseException exception) { + logger.error("Exception :: ", exception); + exception.printStackTrace(); + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch(DateTimeParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "Failed to parse provided datetime field.").toJsonBytes()); + } catch(JsonParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "There is an error in the provided json document").toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception :: ", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + /** + * Method(PATCH) for "/ngsi-ld/v1/entities/{entityId}/attrs" rest endpoint. 
+ * + * @param entityId + * @param payload + * json ld message + * @return ResponseEntity object + */ + @PatchMapping("/{entityId}/attrs") + public ResponseEntity updateEntity(HttpServletRequest request, @PathVariable("entityId") String entityId, + @RequestBody String payload) { + // String resolved = contextResolver.resolveContext(payload); + try { + HttpUtils.doPreflightCheck(request, payload); + logger.trace("update entity :: started"); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.ENTITIES_URL_ID); + + UpdateResult update = entityService.updateMessage(entityId, resolved); + logger.trace("update entity :: completed"); + if (update.getUpdateResult()) { + return ResponseEntity.noContent().build(); + } else { + return ResponseEntity.status(HttpStatus.MULTI_STATUS).body(objectMapper.writeValueAsBytes(update.getAppendedJsonFields())); + } + } catch (ResponseException responseException) { + logger.error("Exception :: ", responseException); + return ResponseEntity.status(responseException.getHttpStatus()).body(new RestResponse(responseException).toJsonBytes()); + } catch(DateTimeParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "Failed to parse provided datetime field.").toJsonBytes()); + } catch(JsonParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "There is an error in the provided json document").toJsonBytes()); + } catch (Exception e) { + logger.error("Exception :: ", e); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()).toJsonBytes()); + } + } + + /** + * Method(POST) for "/ngsi-ld/v1/entities/{entityId}/attrs" rest endpoint. 
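+	 * Example (illustrative, ids are placeholders):
+	 * POST /ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100/attrs appends new attributes;
+	 * passing the request parameter "options" equal to the configured append.overwrite
+	 * flag (e.g. noOverwrite) keeps existing attribute values instead of replacing them.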
+ * + * @param entityId + * @param payload + * jsonld message + * @return ResponseEntity object + */ + @PostMapping("/{entityId}/attrs") + public ResponseEntity appendEntity(HttpServletRequest request, @PathVariable("entityId") String entityId, + @RequestBody String payload, @RequestParam(required = false, name = "options") String options) { + // String resolved = contextResolver.resolveContext(payload); + try { + HttpUtils.doPreflightCheck(request, payload); + logger.trace("append entity :: started"); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.ENTITIES_URL_ID); + + AppendResult append = entityService.appendMessage(entityId, resolved, options); + logger.trace("append entity :: completed"); + if (append.getAppendResult()) { + return ResponseEntity.noContent().build(); + } else { + return ResponseEntity.status(HttpStatus.MULTI_STATUS).body(objectMapper.writeValueAsBytes(append.getAppendedJsonFields())); + } + } catch (ResponseException responseException) { + logger.error("Exception :: ", responseException); + return ResponseEntity.status(responseException.getHttpStatus()).body(new RestResponse(responseException).toJsonBytes()); + } catch(DateTimeParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "Failed to parse provided datetime field.").toJsonBytes()); + } catch(JsonParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "There is an error in the provided json document").toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception :: ", exception); + exception.printStackTrace(); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + /** + * Method(PATCH) for "/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}" rest + * endpoint. + * + * @param entityId + * @param attrId + * @param payload + * @return + */ + @PatchMapping("/{entityId}/attrs/{attrId}") + public ResponseEntity partialUpdateEntity(HttpServletRequest request, + @PathVariable("entityId") String entityId, @PathVariable("attrId") String attrId, + @RequestBody String payload) { + try { + HttpUtils.doPreflightCheck(request, payload); + logger.trace("partial-update entity :: started"); + String expandedPayload = httpUtils.expandPayload(request, payload, AppConstants.ENTITIES_URL_ID); + + String expandedAttrib = paramsResolver.expandAttribute(attrId, payload, request); + + UpdateResult update = entityService.partialUpdateEntity(entityId, expandedAttrib, expandedPayload); + logger.trace("partial-update entity :: completed"); + if (update.getStatus()) { + return ResponseEntity.noContent().build(); + } else { + return ResponseEntity.status(HttpStatus.BAD_REQUEST).build(); + } + /* + * There is no 207 multi status response in the Partial Attribute Update + * operation. Section 6.7.3.1 else { return + * ResponseEntity.status(HttpStatus.MULTI_STATUS).body(update. 
+ * getAppendedJsonFields()); } + */ + } catch (ResponseException responseException) { + logger.error("Exception :: ", responseException); + return ResponseEntity.status(responseException.getHttpStatus()).body(new RestResponse(responseException).toJsonBytes()); + } catch(DateTimeParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "Failed to parse provided datetime field.").toJsonBytes()); + } catch(JsonParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "There is an error in the provided json document").toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception :: ", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + /** + * Method(DELETE) for "/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}" rest + * endpoint. + * + * @param entityId + * @param attrId + * @return + */ + @DeleteMapping("/{entityId}/attrs/{attrId}") + public ResponseEntity deleteAttribute(HttpServletRequest request, @PathVariable("entityId") String entityId, + @PathVariable("attrId") String attrId,@RequestParam(value = "datasetId", required = false) String datasetId, + @RequestParam(value = "deleteAll", required = false) String deleteAll) { + try { + logger.trace("delete attribute :: started"); + Validator.validate(request.getParameterMap()); + String expandedAttrib = paramsResolver.expandAttribute(attrId, HttpUtils.getAtContext(request)); + entityService.deleteAttribute(entityId, expandedAttrib,datasetId,deleteAll); + logger.trace("delete attribute :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException responseException) { + logger.error("Exception :: ", responseException); + return ResponseEntity.status(responseException.getHttpStatus()).body(new RestResponse(responseException).toJsonBytes()); + } catch(DateTimeParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "Failed to parse provided datetime field.").toJsonBytes()); + } catch(JsonParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "There is an error in the provided json document").toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception :: ", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + /** + * Method(DELETE) for "/ngsi-ld/v1/entities/{entityId}" rest endpoint. 
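+	 * Example (illustrative, id is a placeholder):
+	 * DELETE /ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100 answers 204 No Content on
+	 * success and an error body otherwise (e.g. Not Found for an unknown id).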
+ * + * @param entityId + * @return + */ + @DeleteMapping("/{entityId}") + public ResponseEntity deleteEntity(@PathVariable("entityId") String entityId) { + try { + logger.trace("delete entity :: started"); + entityService.deleteEntity(entityId); + logger.trace("delete entity :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException responseException) { + logger.error("Exception :: ", responseException); + return ResponseEntity.status(responseException.getHttpStatus()).body(new RestResponse(responseException).toJsonBytes()); + } catch(DateTimeParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "Failed to parse provided datetime field.").toJsonBytes()); + } catch(JsonParseException exception) { + return ResponseEntity.status(HttpStatus.BAD_REQUEST) + .body(new RestResponse(ErrorType.BadRequestData, "There is an error in the provided json document").toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception :: ", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..eea70c6ef40c248d844972bb70ea1de61764449a --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.java @@ -0,0 +1,22 @@ +package eu.neclab.ngsildbroker.entityhandler.services; + +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.stereotype.Repository; + +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository +public class EntityInfoDAO extends StorageReaderDAO { + public Set getAllIds() { + List tempList = readerJdbcTemplate.queryForList("SELECT id FROM entity", String.class); + return new HashSet(tempList); + } + + public String getEntity(String entityId) { + List tempList = readerJdbcTemplate.queryForList("SELECT data FROM entity WHERE id='" + entityId + "'", String.class); + return tempList.get(0); + } +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityService.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityService.java new file mode 100644 index 0000000000000000000000000000000000000000..4b6d2c9e807ddd58dfaf01bb88333fe8be7bb432 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityService.java @@ -0,0 +1,1279 @@ +package eu.neclab.ngsildbroker.entityhandler.services; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.net.URISyntaxException; +import java.sql.SQLException; +import java.sql.SQLTransientConnectionException; +import java.time.Instant; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.PostConstruct; +import 
javax.servlet.http.HttpServletRequest; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpStatus; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.listener.MessageListenerContainer; +import org.springframework.kafka.support.Acknowledgment; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.messaging.Message; +import org.springframework.messaging.MessageChannel; +import org.springframework.messaging.handler.annotation.Header; +import org.springframework.messaging.handler.annotation.Payload; +import org.springframework.stereotype.Service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.github.filosganga.geogson.model.Geometry; +import com.google.gson.JsonParseException; +import com.netflix.discovery.EurekaClient; +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.AppendResult; +import eu.neclab.ngsildbroker.commons.datatypes.BatchFailure; +import eu.neclab.ngsildbroker.commons.datatypes.BatchResult; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.EntityDetails; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.GeoProperty; +import eu.neclab.ngsildbroker.commons.datatypes.Information; +import eu.neclab.ngsildbroker.commons.datatypes.Property; +import eu.neclab.ngsildbroker.commons.datatypes.Relationship; +import eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.datatypes.TimeInterval; +import eu.neclab.ngsildbroker.commons.datatypes.UpdateResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.KafkaWriteException; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.MicroServiceUtils; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; +import eu.neclab.ngsildbroker.entityhandler.config.EntityProducerChannel; +import eu.neclab.ngsildbroker.entityhandler.config.EntityTopicMap; +import eu.neclab.ngsildbroker.entityhandler.validationutil.IdValidationRule; +import eu.neclab.ngsildbroker.entityhandler.validationutil.PropertyValidatioRule; +import eu.neclab.ngsildbroker.entityhandler.validationutil.RelationshipValidationRule; +import eu.neclab.ngsildbroker.entityhandler.validationutil.TypeValidationRule; +import eu.neclab.ngsildbroker.entityhandler.validationutil.ValidationRules; + +@Service +public class EntityService { + + @Value("${entity.topic}") + String ENTITY_TOPIC; + 
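+	// Note: the @Value fields in this class are injected from the Spring configuration
+	// (application-*.yml profiles). The batchoperations.maxnumber.* limits further below
+	// default to -1, which the batch operations treat as "no limit".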
@Value("${entity.create.topic}") + String ENTITY_CREATE_TOPIC; + @Value("${entity.append.topic}") + String ENTITY_APPEND_TOPIC; + @Value("${entity.update.topic}") + String ENTITY_UPDATE_TOPIC; + @Value("${entity.delete.topic}") + String ENTITY_DELETE_TOPIC; + @Value("${bootstrap.servers}") + String bootstrapServers; + @Value("${append.overwrite}") + String appendOverwriteFlag; + @Value("${entity.index.topic}") + String ENTITY_INDEX; + + @Value("${batchoperations.maxnumber.create:-1}") + int maxCreateBatch; + @Value("${batchoperations.maxnumber.update:-1}") + int maxUpdateBatch; + @Value("${batchoperations.maxnumber.upsert:-1}") + int maxUpsertBatch; + @Value("${batchoperations.maxnumber.delete:-1}") + int maxDeleteBatch; + + boolean directDB = true; + @Autowired + @Qualifier("emstorage") + StorageWriterDAO storageWriterDao; + + @Autowired + EntityInfoDAO entityInfoDAO; + + @Autowired + @Qualifier("emops") + KafkaOps operations; + + @Autowired + @Qualifier("emparamsres") + ParamsResolver paramsResolver; + + final private String regexNgsildAttributeTypes = new String(NGSIConstants.NGSI_LD_PROPERTY + "|" + + NGSIConstants.NGSI_LD_RELATIONSHIP + "|" + NGSIConstants.NGSI_LD_GEOPROPERTY); + + public void setOperations(KafkaOps operations) { + this.operations = operations; + } + + ObjectMapper objectMapper; + @Autowired + private EurekaClient eurekaClient; + @Autowired + @Qualifier("emconRes") + ContextResolverBasic contextResolver; + /* + * @Autowired + * + * @Qualifier("emtopicmap") EntityTopicMap entityTopicMap; + */ + + private final EntityProducerChannel producerChannels; + + LocalDateTime start; + LocalDateTime end; + private Set entityIds = new HashSet(); + + private final static Logger logger = LogManager.getLogger(EntityService.class); + + public EntityService(EntityProducerChannel producerChannels, ObjectMapper objectMapper) { + this.producerChannels = producerChannels; + this.objectMapper = objectMapper; + } + + // @PostConstruct + // private void setupContextResolver() { + // this.contextResolver = + // ContextResolverService.getInstance(producerChannels.atContextWriteChannel(), + // operations); + // } + + // construct in-memory + @PostConstruct + private void loadStoredEntitiesDetails() throws IOException { + synchronized (this.entityIds) { + this.entityIds = entityInfoDAO.getAllIds(); + } + /* + * Map entities = + * this.operations.getAllEntitiesDetails(); + * logger.trace("filling in-memory hashmap started:"); for (EntityDetails entity + * : entities.values()) { logger.trace("key :: " + entity.getKey()); + * entityTopicMap.put(entity.getKey(), entity); } + */logger.trace("filling in-memory hashmap completed:"); + } + + /** + * Method to publish jsonld message to kafka topic + * + * @param payload jsonld message + * @return RestResponse + * @throws KafkaWriteException,Exception + * @throws ResponseException + */ + public String createMessage(String payload) throws ResponseException, Exception { + // get message channel for ENTITY_CREATE topic. 
+ logger.debug("createMessage() :: started"); + // MessageChannel messageChannel = producerChannels.createWriteChannel(); + JsonNode json = SerializationTools.parseJson(objectMapper, payload); + JsonNode idNode = json.get(NGSIConstants.JSON_LD_ID); + JsonNode type = json.get(NGSIConstants.JSON_LD_TYPE); + // null id and type check + if (idNode == null || type == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + String id = idNode.asText(); + logger.debug("entity id " + id); + // check in-memory hashmap for id + synchronized (this.entityIds) { + if (this.entityIds.contains(id)) { + throw new ResponseException(ErrorType.AlreadyExists); + } + this.entityIds.add(id); + } + String now = SerializationTools.formatter.format(Instant.now()); + setTemporalProperties(json, now, now, false); + payload = objectMapper.writeValueAsString(json); + String withSysAttrs = payload; + // new Thread() { + // public void run() { + removeTemporalProperties(json); // remove createdAt/modifiedAt fields informed by the user + String entityWithoutSysAttrs; + try { + entityWithoutSysAttrs = objectMapper.writeValueAsString(json); + pushToDB(id, withSysAttrs, entityWithoutSysAttrs, objectMapper.writeValueAsString(getKeyValueEntity(json))); + } catch (JsonProcessingException e) { + // TODO Auto-generated catch block + logger.error(e); + } + + // }; + // }.start(); + new Thread() { + public void run() { + + try { + registerContext(id.getBytes(NGSIConstants.ENCODE_FORMAT), + withSysAttrs.getBytes(NGSIConstants.ENCODE_FORMAT)); + operations.pushToKafka(producerChannels.createWriteChannel(), + id.getBytes(NGSIConstants.ENCODE_FORMAT), + withSysAttrs.getBytes(NGSIConstants.ENCODE_FORMAT)); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (URISyntaxException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + // TODO use or remove ... why is the check below commented + + }; + }.start(); + + /* + * // write to ENTITY topic after ENTITY_CREATE success. + * operations.pushToKafka(this.producerChannels.entityWriteChannel(), + * id.asText().getBytes(NGSIConstants.ENCODE_FORMAT), + * payload.getBytes(NGSIConstants.ENCODE_FORMAT)); + * + * // write to ENTITY_WITHOUT_SYSATTRS topic + * operations.pushToKafka(this.producerChannels. 
+ * entityWithoutSysAttrsWriteChannel(), + * id.asText().getBytes(NGSIConstants.ENCODE_FORMAT), + * entityWithoutSysAttrs.getBytes(NGSIConstants.ENCODE_FORMAT)); // write to + * KVENTITY topic + * operations.pushToKafka(this.producerChannels.kvEntityWriteChannel(), + * id.asText().getBytes(NGSIConstants.ENCODE_FORMAT), + * objectMapper.writeValueAsBytes(getKeyValueEntity(json))); + */ + + logger.debug("createMessage() :: completed"); + return id; + } + + private void pushToDB(String key, String payload, String withoutSysAttrs, String kv) { + boolean success = false; + while (!success) { + try { + logger.debug("Received message: " + payload); + logger.trace("Writing data..."); + if (storageWriterDao != null && storageWriterDao.storeEntity(key, payload, withoutSysAttrs, kv)) { + + logger.trace("Writing is complete"); + } + success = true; + } catch (SQLTransientConnectionException e) { + logger.warn("SQL Exception attempting retry"); + Random random = new Random(); + int randomNumber = random.nextInt(4000) + 500; + try { + Thread.sleep(randomNumber); + } catch (InterruptedException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } + } + // removeId on failure + + } + + // public String createMessageTest(String payload) throws ResponseException { + // JsonNode json = SerializationTools.parseJson(objectMapper, payload); + // JsonNode id = json.get(NGSIConstants.QUERY_PARAMETER_ID); + // operations.pushToKafka(this.producerChannels.entityWriteChannel(), + // id.asText().getBytes(), payload.getBytes()); + // return id.asText(); + // } + + public JsonNode getKeyValueEntity(JsonNode json) { + ObjectNode kvJsonObject = objectMapper.createObjectNode(); + Iterator> iter = json.fields(); + while (iter.hasNext()) { + Map.Entry entry = iter.next(); + if (entry.getKey().equals(NGSIConstants.JSON_LD_ID) || entry.getKey().equals(NGSIConstants.JSON_LD_TYPE)) { + kvJsonObject.set(entry.getKey(), entry.getValue()); + } else if (entry.getValue().isArray()) { + ArrayNode values = objectMapper.createArrayNode(); + Iterator it = entry.getValue().elements(); + while (it.hasNext()) { + ObjectNode attrObj = (ObjectNode) it.next(); + if (attrObj.has(NGSIConstants.JSON_LD_VALUE)) { // common members like createdAt do not have + // hasValue/hasObject + values.add(entry.getValue()); + } else if (attrObj.has(NGSIConstants.NGSI_LD_HAS_VALUE)) { + values.add(attrObj.get(NGSIConstants.NGSI_LD_HAS_VALUE)); + } else if (attrObj.has(NGSIConstants.NGSI_LD_HAS_OBJECT) + && attrObj.get(NGSIConstants.NGSI_LD_HAS_OBJECT).isArray() + && attrObj.get(NGSIConstants.NGSI_LD_HAS_OBJECT).get(0).has(NGSIConstants.JSON_LD_ID)) { + values.add(attrObj.get(NGSIConstants.NGSI_LD_HAS_OBJECT).get(0).get(NGSIConstants.JSON_LD_ID)); + } + } + if (values.size() == 1) { + kvJsonObject.set(entry.getKey(), values.get(0)); + } else { + kvJsonObject.set(entry.getKey(), values); + } + + } + } + return kvJsonObject; + } + + private void setTemporalProperties(JsonNode jsonNode, String createdAt, String modifiedAt, boolean rootOnly) { + if (!jsonNode.isObject()) { + return; + } + ObjectNode objectNode = (ObjectNode) jsonNode; + if (!createdAt.isEmpty()) { + objectNode.remove(NGSIConstants.NGSI_LD_CREATED_AT); + objectNode.putArray(NGSIConstants.NGSI_LD_CREATED_AT).addObject() + .put(NGSIConstants.JSON_LD_TYPE, NGSIConstants.NGSI_LD_DATE_TIME) + .put(NGSIConstants.JSON_LD_VALUE, createdAt); + } + if (!modifiedAt.isEmpty()) { + objectNode.remove(NGSIConstants.NGSI_LD_MODIFIED_AT); + 
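+			// Same expanded structure as for createdAt above: a one-element array whose
+			// object carries the DateTime type and the timestamp string as its value.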
objectNode.putArray(NGSIConstants.NGSI_LD_MODIFIED_AT).addObject() + .put(NGSIConstants.JSON_LD_TYPE, NGSIConstants.NGSI_LD_DATE_TIME) + .put(NGSIConstants.JSON_LD_VALUE, modifiedAt); + } + if (rootOnly) { + return; + } + + Iterator> iter = objectNode.fields(); + while (iter.hasNext()) { + Map.Entry entry = iter.next(); + if (entry.getValue().isArray() && entry.getValue().has(0) && entry.getValue().get(0).isObject()) { + ObjectNode attrObj = (ObjectNode) entry.getValue().get(0); + // add createdAt/modifiedAt only to properties, geoproperties and relationships + if (attrObj.has(NGSIConstants.JSON_LD_TYPE) && attrObj.get(NGSIConstants.JSON_LD_TYPE).isArray() + && attrObj.get(NGSIConstants.JSON_LD_TYPE).has(0) + && attrObj.get(NGSIConstants.JSON_LD_TYPE).get(0).asText().matches(regexNgsildAttributeTypes)) { + setTemporalProperties(attrObj, createdAt, modifiedAt, rootOnly); + } + } + } + } + + private void removeTemporalProperties(JsonNode jsonNode) { + if (!jsonNode.isObject()) { + return; + } + ObjectNode objectNode = (ObjectNode) jsonNode; + objectNode.remove(NGSIConstants.NGSI_LD_CREATED_AT); + objectNode.remove(NGSIConstants.NGSI_LD_MODIFIED_AT); + + String regexNgsildAttributeTypes = new String(NGSIConstants.NGSI_LD_PROPERTY + "|" + + NGSIConstants.NGSI_LD_RELATIONSHIP + "|" + NGSIConstants.NGSI_LD_GEOPROPERTY); + Iterator> iter = objectNode.fields(); + while (iter.hasNext()) { + Map.Entry entry = iter.next(); + if (entry.getValue().isArray() && entry.getValue().has(0) && entry.getValue().get(0).isObject()) { + ObjectNode attrObj = (ObjectNode) entry.getValue().get(0); + // add createdAt/modifiedAt only to properties, geoproperties and relationships + if (attrObj.has(NGSIConstants.JSON_LD_TYPE) && attrObj.get(NGSIConstants.JSON_LD_TYPE).isArray() + && attrObj.get(NGSIConstants.JSON_LD_TYPE).has(0) + && attrObj.get(NGSIConstants.JSON_LD_TYPE).get(0).asText().matches(regexNgsildAttributeTypes)) { + removeTemporalProperties(attrObj); + } + } + } + } + + /** + * Method to update a existing Entity in the system/kafka topic + * + * @param entityId - id of entity to be updated + * @param payload - jsonld message containing fileds to be updated with updated + * values + * @return RestResponse + * @throws ResponseException + * @throws IOException + */ + public UpdateResult updateMessage(String entityId, String payload) throws ResponseException, Exception { + logger.trace("updateMessage() :: started"); + // get message channel for ENTITY_UPDATE topic + MessageChannel messageChannel = producerChannels.updateWriteChannel(); + String entityBody = validateIdAndGetBody(entityId); + /* + * String payloadResolved=contextResolver.applyContext(payload); + * System.out.println(payloadResolved); String + * original=contextResolver.applyContext(new String(originalJson), + * contextResolver.getContext(entityId)); + * System.out.println("origial :: "+original); + */ + // update fields + JsonNode updateNode = objectMapper.readTree(payload); + UpdateResult updateResult = this.updateFields(entityBody, updateNode, null); + // pubilsh merged message + // & check if anything is changed. 
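+		// If the merge changed anything: persist the merged entity via the direct DB path,
+		// then, in a background thread, push the changed fields to the update channel and
+		// send the full merged entity to the context-registry update channel.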
+ if (updateResult.getStatus()) { + if (directDB) { + String entityWithoutSysAttrs = new String(updateResult.getJsonWithoutSysAttrs()); + String withSysAttrs = new String(updateResult.getJson()); + try { + pushToDB(entityId, withSysAttrs, entityWithoutSysAttrs, + objectMapper.writeValueAsString(getKeyValueEntity(objectMapper.readTree(withSysAttrs)))); + } catch (JsonProcessingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + new Thread() { + public void run() { + try { + operations.pushToKafka(messageChannel, entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + updateResult.getAppendedJsonFields().toString().getBytes()); + updateContext(entityId.getBytes(NGSIConstants.ENCODE_FORMAT), updateResult.getJson()); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }; + }.start(); + } + logger.trace("updateMessage() :: completed"); + return updateResult; + } + + private String validateIdAndGetBody(String entityId) throws ResponseException { + // null id check + if (entityId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + // get entity details from in-memory hashmap. + synchronized (this.entityIds) { + if (!this.entityIds.contains(entityId)) { + throw new ResponseException(ErrorType.NotFound); + } + } + String entityBody = null; + if (directDB) { + entityBody = this.entityInfoDAO.getEntity(entityId); + } + return entityBody; + } + + /** + * Method to append fields in existing Entity in system/kafka topic + * + * @param entityId - id of entity to be appended + * @param payload - jsonld message containing fileds to be appended + * @return AppendResult + * @throws ResponseException + * @throws IOException + */ + public AppendResult appendMessage(String entityId, String payload, String overwriteOption) + throws ResponseException, Exception { + logger.trace("appendMessage() :: started"); + // get message channel for ENTITY_APPEND topic + MessageChannel messageChannel = producerChannels.appendWriteChannel(); + // payload validation + if (entityId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + // get entity details + String entityBody = validateIdAndGetBody(entityId); + AppendResult appendResult = this.appendFields(entityBody, objectMapper.readTree(payload), overwriteOption); + // get entity from ENTITY topic. 
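+		// appendFields has merged the new attributes into the stored entity body; on success
+		// the merged entity is written to the database and the appended fields are pushed to
+		// the append channel in a background thread.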
+ // pubilsh merged message + // check if anything is changed + if (appendResult.getStatus()) { + if (directDB) { + String entityWithoutSysAttrs = new String(appendResult.getJsonWithoutSysAttrs()); + String withSysAttrs = new String(appendResult.getJson()); + try { + pushToDB(entityId, withSysAttrs, entityWithoutSysAttrs, + objectMapper.writeValueAsString(getKeyValueEntity(objectMapper.readTree(withSysAttrs)))); + } catch (JsonProcessingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + new Thread() { + public void run() { + try { + operations.pushToKafka(messageChannel, entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + objectMapper.writeValueAsBytes(appendResult.getJsonToAppend())); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (JsonProcessingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }; + }.start(); + + /* + * // write to ENTITY topic after ENTITY_APPEND success. + * operations.pushToKafka(this.producerChannels.entityWriteChannel(), + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), finalJson); // write to + * ENTITY_WITHOUT_SYSATTRS topic operations.pushToKafka(this.producerChannels. + * entityWithoutSysAttrsWriteChannel(), + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + * appendResult.getJsonWithoutSysAttrs()); + * operations.pushToKafka(this.producerChannels.kvEntityWriteChannel(), + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + * objectMapper.writeValueAsBytes(getKeyValueEntity(appendResult.getFinalNode()) + * )); + */ + } + logger.trace("appendMessage() :: completed"); + return appendResult; + } + + public boolean deleteEntity(String entityId) throws ResponseException, Exception { + logger.trace("deleteEntity() :: started"); + // get message channel for ENTITY_DELETE topic + MessageChannel messageChannel = producerChannels.deleteWriteChannel(); + if (entityId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + // get entity details from in-memory hashmap + synchronized (this.entityIds) { + if (!this.entityIds.remove(entityId)) { + throw new ResponseException(ErrorType.NotFound); + } + if (directDB) { + storageWriterDao.store(DBConstants.DBTABLE_ENTITY, DBConstants.DBCOLUMN_DATA, entityId, null); + } + } + new Thread() { + public void run() { + try { + operations.pushToKafka(messageChannel, entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + "{}".getBytes()); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }; + }.start(); + /* + * EntityDetails entityDetails = entityTopicMap.get(entityId); if (entityDetails + * == null) { throw new ResponseException(ErrorType.NotFound); } // get entity + * from entity topic byte[] originalJson = + * this.operations.getMessage(this.ENTITY_TOPIC, entityId, + * entityDetails.getPartition(), entityDetails.getOffset()); // check whether + * exists. if (originalJson == null) { throw new + * ResponseException(ErrorType.NotFound); } // TODO use or remove ... 
why is the + * check below commented boolean result = + * this.operations.pushToKafka(messageChannel, + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), originalJson); + * + * if (!result) { throw new ResponseException(ErrorType.KafkaWriteError); } + * + * operations.pushToKafka(this.producerChannels.entityWriteChannel(), + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + * "null".getBytes(NGSIConstants.ENCODE_FORMAT)); + * operations.pushToKafka(this.producerChannels. + * entityWithoutSysAttrsWriteChannel(), + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + * "null".getBytes(NGSIConstants.ENCODE_FORMAT)); + * operations.pushToKafka(this.producerChannels.kvEntityWriteChannel(), + * entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + * "null".getBytes(NGSIConstants.ENCODE_FORMAT)); + * + */ logger.trace("deleteEntity() :: completed"); + return true; + } + + public UpdateResult partialUpdateEntity(String entityId, String attrId, String payload) + throws ResponseException, Exception { + logger.trace("partialUpdateEntity() :: started"); + // get message channel for ENTITY_APPEND topic + MessageChannel messageChannel = producerChannels.updateWriteChannel(); + if (entityId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + // get entity details + String entityBody = validateIdAndGetBody(entityId); + // JsonNode originalJsonNode = objectMapper.readTree(originalJson); + + UpdateResult updateResult = this.updateFields(entityBody, objectMapper.readTree(payload), attrId); + // pubilsh merged message + // check if anything is changed. + if (updateResult.getStatus()) { + if (directDB) { + JsonNode json = updateResult.getFinalNode(); + String withSysAttrs = new String(updateResult.getJson()); + removeTemporalProperties(json); // remove createdAt/modifiedAt fields informed by the user + String entityWithoutSysAttrs; + try { + entityWithoutSysAttrs = new String(updateResult.getJsonWithoutSysAttrs()); + pushToDB(entityId, withSysAttrs, entityWithoutSysAttrs, + objectMapper.writeValueAsString(getKeyValueEntity(objectMapper.readTree(withSysAttrs)))); + } catch (JsonProcessingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + new Thread() { + public void run() { + try { + operations.pushToKafka(messageChannel, entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + objectMapper.writeValueAsBytes(updateResult.getFinalNode())); + updateContext(entityId.getBytes(NGSIConstants.ENCODE_FORMAT), updateResult.getJson()); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (JsonProcessingException e) { + e.printStackTrace(); + } + }; + }.start(); + + } + logger.trace("partialUpdateEntity() :: completed"); + return updateResult; + + } + + public boolean deleteAttribute(String entityId, String attrId,String datasetId,String deleteAll) throws ResponseException, Exception { + logger.trace("deleteAttribute() :: started"); + // get message channel for ENTITY_APPEND topic + MessageChannel messageChannel = producerChannels.deleteWriteChannel(); + if (entityId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + // get entity details from in-memory hashmap + String entityBody = validateIdAndGetBody(entityId); + + JsonNode finalJson = this.deleteFields(entityBody, attrId, datasetId, deleteAll); + String finalFullEntity = objectMapper.writeValueAsString(finalJson); + 
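+		// Three representations of the entity are rebuilt and stored: the full JSON with
+		// system attributes, the version without createdAt/modifiedAt, and the key-value form.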
removeTemporalProperties(finalJson); + String entityWithoutSysAttrs = objectMapper.writeValueAsString(finalJson); + String kvEntity = objectMapper.writeValueAsString(getKeyValueEntity(finalJson)); + pushToDB(entityId, finalFullEntity, entityWithoutSysAttrs, kvEntity); + + // pubilsh updated message + new Thread() { + public void run() { + try { + operations.pushToKafka(messageChannel, entityId.getBytes(NGSIConstants.ENCODE_FORMAT), + finalFullEntity.getBytes()); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }; + }.start(); + + logger.trace("deleteAttribute() :: completed"); + return true; + } + + /** + * Method to merge/update fields in original Entitiy + * @param originalJsonObject + * @param jsonToUpdate + * @param attrId + * @return + * @throws Exception + * @throws ResponseException + */ + public UpdateResult updateFields(String originalJsonObject, JsonNode jsonToUpdate, String attrId) + throws Exception, ResponseException { + logger.trace("updateFields() :: started"); + String now = SerializationTools.formatter.format(Instant.now()); + JsonNode resultJson = objectMapper.createObjectNode(); + UpdateResult updateResult = new UpdateResult(jsonToUpdate, resultJson); + JsonNode node = objectMapper.readTree(originalJsonObject); + ObjectNode objectNode = (ObjectNode) node; + if (attrId != null) { + if (objectNode.get(attrId) == null) { + throw new ResponseException(ErrorType.NotFound, "Provided attribute is not present"); + } + JsonNode originalNode = ((ArrayNode) objectNode.get(attrId)).get(0); + if (((ObjectNode) originalNode).has(NGSIConstants.NGSI_LD_INSTANCE_ID)) { + ((ObjectNode) originalNode).remove(NGSIConstants.NGSI_LD_INSTANCE_ID); + } + JsonNode innerNode = ((ArrayNode) objectNode.get(attrId)); + ArrayNode myArray = (ArrayNode) innerNode; + String availableDatasetId = null; + for (int i = 0; i < myArray.size(); i++) { + if (myArray.get(i).has(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + String payloadDatasetId = myArray.get(i).get(NGSIConstants.NGSI_LD_DATA_SET_ID).get(0) + .get(NGSIConstants.JSON_LD_ID).asText(); + if (jsonToUpdate.has(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + String datasetId = jsonToUpdate.get(NGSIConstants.NGSI_LD_DATA_SET_ID).get(0) + .get(NGSIConstants.JSON_LD_ID).asText(); + if (payloadDatasetId.equalsIgnoreCase(datasetId)) { + availableDatasetId = "available"; + setFieldValue(jsonToUpdate.fieldNames(), ((ArrayNode) objectNode.get(attrId)), jsonToUpdate, + updateResult, i); + } + } else { + if (payloadDatasetId.equals(NGSIConstants.DEFAULT_DATA_SET_ID)) { + setFieldValue(jsonToUpdate.fieldNames(), ((ArrayNode) objectNode.get(attrId)), jsonToUpdate, + updateResult, i); + } + } + } else { + if (jsonToUpdate.has(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + ((ObjectNode) innerNode.get(i)).putArray(NGSIConstants.NGSI_LD_DATA_SET_ID).addObject() + .put(NGSIConstants.JSON_LD_ID, NGSIConstants.DEFAULT_DATA_SET_ID); + } else { + ((ObjectNode) innerNode.get(i)).putArray(NGSIConstants.NGSI_LD_DATA_SET_ID).addObject() + .put(NGSIConstants.JSON_LD_ID, NGSIConstants.DEFAULT_DATA_SET_ID); + setFieldValue(jsonToUpdate.fieldNames(), ((ArrayNode) objectNode.get(attrId)), jsonToUpdate, + updateResult, i); + } + } + } + if (jsonToUpdate.has(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + if ((availableDatasetId == null) || (availableDatasetId.isEmpty())) { + throw new ResponseException(ErrorType.NotFound, "Provided datasetId is not 
present"); + } + } + } else { + Iterator it = jsonToUpdate.fieldNames(); + while (it.hasNext()) { + String field = it.next(); + // TOP level updates of context id or type are ignored + if (field.equalsIgnoreCase(NGSIConstants.JSON_LD_CONTEXT) + || field.equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || field.equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE)) { + continue; + } + logger.trace("field: " + field); + if (node.has(field)) { + JsonNode originalNode = ((ArrayNode) objectNode.get(field)).get(0); + JsonNode attrNode = jsonToUpdate.get(field).get(0); + String createdAt = now; + + // keep original createdAt value if present in the original json + if ((originalNode instanceof ObjectNode) + && ((ObjectNode) originalNode).has(NGSIConstants.NGSI_LD_CREATED_AT) + && ((ObjectNode) originalNode).get(NGSIConstants.NGSI_LD_CREATED_AT).isArray()) { + createdAt = ((ObjectNode) ((ObjectNode) originalNode).get(NGSIConstants.NGSI_LD_CREATED_AT) + .get(0)).get(NGSIConstants.JSON_LD_VALUE).asText(); + } + setTemporalProperties(attrNode, createdAt, now, true); + + // TODO check if this should ever happen. 5.6.4.4 says BadRequest if AttrId is + // present ... + objectNode.replace(field, jsonToUpdate.get(field)); + ((ObjectNode) updateResult.getAppendedJsonFields()).set(field, jsonToUpdate.get(field)); + logger.trace("appended json fields: " + updateResult.getAppendedJsonFields().toString()); + updateResult.setStatus(true); + } else { + // throw new ResponseException(ErrorType.NotFound); + } + } + } + setTemporalProperties(node, "", now, true); // root only, modifiedAt only + updateResult.setJson(node.toString().getBytes(NGSIConstants.ENCODE_FORMAT)); + updateResult.setFinalNode(node); + removeTemporalProperties(node); + updateResult.setJsonWithoutSysAttrs(node.toString().getBytes(NGSIConstants.ENCODE_FORMAT)); + logger.trace("updateFields() :: completed"); + return updateResult; + } + + /** + * Method to merge/append fileds in original Entity + * + * @param originalJsonObject + * @param jsonToUpdate + * @return AppendResult + * @throws IOException + */ + public AppendResult appendFields(String originalJsonObject, JsonNode jsonToAppend, String overwriteOption) + throws Exception { + logger.trace("appendFields() :: started"); + String now = SerializationTools.formatter.format(Instant.now()); + JsonNode resultJson = objectMapper.createObjectNode(); + AppendResult appendResult = new AppendResult(jsonToAppend, resultJson); + JsonNode node = objectMapper.readTree(originalJsonObject); + ObjectNode objectNode = (ObjectNode) node; + Iterator it = jsonToAppend.fieldNames(); + while (it.hasNext()) { + String key = it.next(); + if (key.equalsIgnoreCase(NGSIConstants.JSON_LD_CONTEXT) || key.equalsIgnoreCase(NGSIConstants.JSON_LD_ID)) { + continue; + } + // remove if passed attribute have null value. + if (jsonToAppend.get(key).isNull()) { + objectNode.remove(key); + ((ObjectNode) appendResult.getAppendedJsonFields()).set(key, jsonToAppend.get(key)); + appendResult.setStatus(true); + continue; + } + // validation append payload attribute + /* + * if (!Validator.isValidAttribute(jsonToAppend.get(key))) { ((ObjectNode) + * appendResult.getAppendedJsonFields()).set(key, jsonToAppend.get(key)); + * appendResult.setStatus(true); continue; } + */ + + if ((objectNode.has(key) && !appendOverwriteFlag.equalsIgnoreCase(overwriteOption)) + || !objectNode.has(key)) { + if (jsonToAppend.get(key).isArray() && jsonToAppend.get(key).has(0)) { + // TODO: should we keep the createdAt value if attribute already exists? 
+ // (overwrite operation) => if (objectNode.has(key)) ... + JsonNode attrNode = jsonToAppend.get(key).get(0); + setTemporalProperties(attrNode, now, now, true); + } + objectNode.replace(key, jsonToAppend.get(key)); + ((ObjectNode) appendResult.getAppendedJsonFields()).set(key, jsonToAppend.get(key)); + appendResult.setStatus(true); + } + } + setTemporalProperties(node, "", now, true); // root only, modifiedAt only + appendResult.setJson(node.toString().getBytes(NGSIConstants.ENCODE_FORMAT)); + + removeTemporalProperties(node); + appendResult.setJsonWithoutSysAttrs(node.toString().getBytes(NGSIConstants.ENCODE_FORMAT)); + appendResult.setFinalNode(node); + logger.trace("appendFields() :: completed"); + return appendResult; + } + + /** + * Method to delete attributes from original Entity + * + * @param originalJsonObject + * @param attrId + * @return + * @throws IOException + * @throws ResponseException + */ + public JsonNode deleteFields(String originalJsonObject, String attrId, String datasetId, String deleteAll) throws Exception, ResponseException { + logger.trace("deleteFields() :: started"); + JsonNode node = objectMapper.readTree(originalJsonObject); + ObjectNode objectNode = (ObjectNode) node; + JsonNode innerNode = ((ArrayNode) objectNode.get(attrId)); + ArrayNode myArray = (ArrayNode) innerNode; + String availableDatasetId = null; + if (objectNode.has(attrId)) { + //below condition remove the existing datasetId + if (datasetId != null && !datasetId.isEmpty()) { + for (int i = 0; i < myArray.size(); i++) { + if (myArray.get(i).has(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + String payloadDatasetId = myArray.get(i).get(NGSIConstants.NGSI_LD_DATA_SET_ID).get(0) + .get(NGSIConstants.JSON_LD_ID).asText(); + if (payloadDatasetId.equals(datasetId)) { + availableDatasetId = "available"; + myArray.remove(i); + } + } + } + if ((availableDatasetId == null) || (availableDatasetId.isEmpty())) { + throw new ResponseException(ErrorType.NotFound, "Provided datasetId is not present"); + } + // below condition remove all the datasetId + } else if (deleteAll != null && !deleteAll.isEmpty()) { + if (deleteAll.equals("true")) { + if (objectNode.has(attrId)) { + objectNode.remove(attrId); + } else { + throw new ResponseException(ErrorType.NotFound); + } + } else { + throw new ResponseException(ErrorType.InvalidRequest, "request is not valid"); + } + } else { + // below condition remove the default datasetId + for (int i = 0; i < myArray.size(); i++) { + if (myArray.get(i).has(NGSIConstants.NGSI_LD_DATA_SET_ID)) { + String payloadDatasetId = myArray.get(i).get(NGSIConstants.NGSI_LD_DATA_SET_ID).get(0) + .get(NGSIConstants.JSON_LD_ID).asText(); + if (payloadDatasetId.equals(NGSIConstants.DEFAULT_DATA_SET_ID)) { + availableDatasetId = "available"; + myArray.remove(i); + } + } else { + availableDatasetId = "NotAvailable"; + myArray.remove(i); + } + } + if ((availableDatasetId == null) || (availableDatasetId.isEmpty())) { + throw new ResponseException(ErrorType.NotFound, "Default attribute instance is not present"); + } + } + } else { + throw new ResponseException(ErrorType.NotFound, "Attribute is not present"); + } + logger.trace("deleteFields() :: completed"); + return objectNode; + } + + public boolean registerContext(byte[] id, byte[] payload) + throws URISyntaxException, IOException, ResponseException { + logger.trace("registerContext() :: started"); + MessageChannel messageChannel = producerChannels.contextRegistryWriteChannel(); + CSourceRegistration contextRegistryPayload = 
this.getCSourceRegistrationFromJson(payload); + this.operations.pushToKafka(messageChannel, id, DataSerializer.toJson(contextRegistryPayload).getBytes()); + logger.trace("registerContext() :: completed"); + return true; + } + + private void updateContext(byte[] id, byte[] payload) throws ResponseException { + logger.trace("updateContext() :: started"); + MessageChannel messageChannel = producerChannels.contextUpdateWriteChannel(); + this.operations.pushToKafka(messageChannel, id, payload); + logger.trace("updateContext() :: completed"); + } + + private CSourceRegistration getCSourceRegistrationFromJson(byte[] payload) throws URISyntaxException, IOException { + logger.trace("getCSourceRegistrationFromJson() :: started"); + CSourceRegistration csourceRegistration = new CSourceRegistration(); + List information = new ArrayList(); + Information info = new Information(); + List entities = info.getEntities(); + Entity entity = DataSerializer.getEntity(objectMapper.writeValueAsString(objectMapper.readTree(payload))); + + // Entity to CSourceRegistration conversion. + csourceRegistration.setId(entity.getId()); + csourceRegistration.setEndpoint(MicroServiceUtils.getGatewayURL(eurekaClient)); + // location node + GeoProperty geoLocationProperty = entity.getLocation(); + if (geoLocationProperty != null) { + csourceRegistration.setLocation(getCoveringGeoValue(geoLocationProperty)); + } + + // Information node + Set propertiesList = entity.getProperties().stream().map(Property::getIdString) + .collect(Collectors.toSet()); + + Set relationshipsList = entity.getRelationships().stream().map(Relationship::getIdString) + .collect(Collectors.toSet()); + + entities.add(new EntityInfo(entity.getId(), null, entity.getType())); + + info.setProperties(propertiesList); + info.setRelationships(relationshipsList); + information.add(info); + csourceRegistration.setInformation(information); + + // location node. 
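+		// Registration time interval: only the start of the interval is set, using the current time.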
+ + TimeInterval timestamp = new TimeInterval(); + timestamp.setStart(new Date().getTime()); + csourceRegistration.setTimestamp(timestamp); + logger.trace("getCSourceRegistrationFromJson() :: completed"); + return csourceRegistration; + } + + private Geometry getCoveringGeoValue(GeoProperty geoLocationProperty) { + // TODO should be done better to cover the actual area + return geoLocationProperty.getEntries().values().iterator().next().getGeoValue(); + } + + public URI getResourceURL(String resource) throws URISyntaxException { + logger.trace("getResourceURL() :: started"); + URI uri = MicroServiceUtils.getGatewayURL(eurekaClient); + logger.trace("getResourceURL() :: completed"); + return new URI(uri.toString() + "/" + resource); + } + + /* + * @KafkaListener(topics = "${entity.topic}", groupId = "entitymanager") public + * void updateTopicDetails(Message message) throws IOException { + * logger.trace("updateTopicDetails() :: started"); String key = + * operations.getMessageKey(message); int partitionId = (int) + * message.getHeaders().get(KafkaHeaders.RECEIVED_PARTITION_ID); long offset = + * (long) message.getHeaders().get(KafkaHeaders.OFFSET); JsonNode entityJsonBody + * = objectMapper.readTree(message.getPayload()); boolean isDeletedMsg = + * entityJsonBody.isNull(); if (isDeletedMsg) { entityTopicMap.remove(key); } + * else { entityTopicMap.put(key, new EntityDetails(key, partitionId, offset)); + * } logger.trace("updateTopicDetails() :: completed"); } + */ + + public void validateEntity(String payload, HttpServletRequest request) throws ResponseException { + Entity entity; + try { + entity = DataSerializer.getEntity(payload); + } catch (JsonParseException e) { + throw new ResponseException(ErrorType.BadRequestData, e.getMessage()); + } + List rules = new ArrayList<>(); + rules.add(new IdValidationRule()); + rules.add(new TypeValidationRule()); + rules.add(new PropertyValidatioRule()); + rules.add(new RelationshipValidationRule()); + + for (ValidationRules rule : rules) { + rule.validateEntity(entity, request); + } + } + + public BatchResult createMultipleMessage(String payload) throws ResponseException { + + try { + BatchResult result = new BatchResult(); + JsonNode myTree = objectMapper.readTree(payload); + if (!myTree.isArray()) { + throw new ResponseException(ErrorType.InvalidRequest, + "This interface only supports arrays of entities"); + } + ArrayNode myArray = (ArrayNode) myTree; + if (maxCreateBatch != -1 && myArray.size() > maxCreateBatch) { + throw new ResponseException(ErrorType.RequestEntityTooLarge, + "Maximum allowed number of entities for this operation is " + maxCreateBatch); + } + Iterator it = myArray.iterator(); + while (it.hasNext()) { + JsonNode next = it.next(); + + try { + result.addSuccess(createMessage(objectMapper.writeValueAsString(next))); + } catch (Exception e) { + + String entityId = "NOT AVAILABLE"; + if (next.hasNonNull(NGSIConstants.JSON_LD_ID)) { + entityId = next.get(NGSIConstants.JSON_LD_ID).asText(); + } + RestResponse response; + if (e instanceof ResponseException) { + response = new RestResponse((ResponseException) e); + } else { + response = new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()); + } + + result.addFail(new BatchFailure(entityId, response)); + } + + } + return result; + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData, e.getMessage()); + } + + } + + public BatchResult deleteMultipleMessage(String payload) throws ResponseException { + try { + BatchResult result = new BatchResult(); + 
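+			// The batch-delete payload is a plain JSON array of entity id strings (not JSON-LD),
+			// e.g. ["urn:ngsi-ld:Vehicle:A100", ...] (example ids only); each element is read
+			// with asText() and handed to deleteEntity().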
JsonNode myTree = objectMapper.readTree(payload); + if (!myTree.isArray()) { + throw new ResponseException(ErrorType.InvalidRequest, + "This interface only supports arrays of entities"); + } + ArrayNode myArray = (ArrayNode) myTree; + if (maxDeleteBatch != -1 && myArray.size() > maxDeleteBatch) { + throw new ResponseException(ErrorType.RequestEntityTooLarge, + "Maximum allowed number of entities for this operation is " + maxDeleteBatch); + } + + Iterator it = myArray.iterator(); + while (it.hasNext()) { + JsonNode next = it.next(); + String entityId = next.asText(); + try { + if (deleteEntity(entityId)) { + result.addSuccess(entityId); + } + } catch (Exception e) { + RestResponse response; + if (e instanceof ResponseException) { + response = new RestResponse((ResponseException) e); + } else { + response = new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()); + } + + result.addFail(new BatchFailure(entityId, response)); + } + } + return result; + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData, e.getMessage()); + } + + } + + public BatchResult updateMultipleMessage(String resolved) throws ResponseException { + try { + BatchResult result = new BatchResult(); + JsonNode myTree = objectMapper.readTree(resolved); + if (!myTree.isArray()) { + throw new ResponseException(ErrorType.InvalidRequest, + "This interface only supports arrays of entities"); + } + ArrayNode myArray = (ArrayNode) myTree; + if (maxUpdateBatch != -1 && myArray.size() > maxUpdateBatch) { + throw new ResponseException(ErrorType.RequestEntityTooLarge, + "Maximum allowed number of entities for this operation is " + maxUpdateBatch); + } + Iterator it = myArray.iterator(); + while (it.hasNext()) { + JsonNode next = it.next(); + String entityId = "NOT AVAILABLE"; + if (next.hasNonNull(NGSIConstants.JSON_LD_ID)) { + entityId = next.get(NGSIConstants.JSON_LD_ID).asText(); + } else { + result.addFail(new BatchFailure(entityId, + new RestResponse(ErrorType.BadRequestData, "No Entity Id provided"))); + continue; + } + try { + AppendResult updateResult = appendMessage(entityId, objectMapper.writeValueAsString(next), null); + if (updateResult.getStatus()) { + result.addSuccess(entityId); + } else { + result.addFail(new BatchFailure(entityId, new RestResponse(ErrorType.MultiStatus, + objectMapper.writeValueAsString(updateResult.getJsonToAppend()) + " was not added"))); + } + + } catch (Exception e) { + + RestResponse response; + if (e instanceof ResponseException) { + response = new RestResponse((ResponseException) e); + } else { + response = new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()); + } + + result.addFail(new BatchFailure(entityId, response)); + } + + } + return result; + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData, e.getMessage()); + } + } + + public BatchResult upsertMultipleMessage(String resolved) throws ResponseException { + try { + BatchResult result = new BatchResult(); + JsonNode myTree = objectMapper.readTree(resolved); + if (!myTree.isArray()) { + throw new ResponseException(ErrorType.InvalidRequest, + "This interface only supports arrays of entities"); + } + ArrayNode myArray = (ArrayNode) myTree; + if (maxUpsertBatch != -1 && myArray.size() > maxUpsertBatch) { + throw new ResponseException(ErrorType.RequestEntityTooLarge, + "Maximum allowed number of entities for this operation is " + maxUpsertBatch); + } + Iterator it = myArray.iterator(); + while (it.hasNext()) { + JsonNode next = it.next(); + String entityId = "NOT 
AVAILABLE"; + if (next.hasNonNull(NGSIConstants.JSON_LD_ID)) { + entityId = next.get(NGSIConstants.JSON_LD_ID).asText(); + } else { + result.addFail(new BatchFailure(entityId, + new RestResponse(ErrorType.BadRequestData, "No Entity Id provided"))); + continue; + } + String entityString = objectMapper.writeValueAsString(next); + try { + + result.addSuccess(createMessage(entityString)); + + } catch (Exception e) { + + RestResponse response; + if (e instanceof ResponseException) { + ResponseException responseException = ((ResponseException) e); + if (responseException.getHttpStatus().equals(HttpStatus.CONFLICT)) { + AppendResult updateResult; + try { + updateResult = appendMessage(entityId, entityString, null); + + if (updateResult.getStatus()) { + result.addSuccess(entityId); + } else { + result.addFail(new BatchFailure(entityId, + new RestResponse(ErrorType.MultiStatus, + objectMapper.writeValueAsString(updateResult.getJsonToAppend()) + + " was not added"))); + } + } catch (Exception e1) { + + if (e1 instanceof ResponseException) { + response = new RestResponse((ResponseException) e1); + } else { + response = new RestResponse(ErrorType.InternalError, e1.getLocalizedMessage()); + } + + result.addFail(new BatchFailure(entityId, response)); + } + } else { + response = new RestResponse((ResponseException) e); + result.addFail(new BatchFailure(entityId, response)); + } + + } else { + response = new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()); + result.addFail(new BatchFailure(entityId, response)); + } + + } + + } + return result; + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData, e.getMessage()); + } + } + + /** + * this method use for update the value of jsonNode. + * @param it + * @param innerNode + * @param jsonToUpdate + * @param updateResult + * @param i + */ + private void setFieldValue(Iterator it, JsonNode innerNode, JsonNode jsonToUpdate, UpdateResult updateResult, + int i) { + while (it.hasNext()) { + String field = it.next(); + // TOP level updates of context id or type are ignored + if (field.equalsIgnoreCase(NGSIConstants.JSON_LD_CONTEXT) + || field.equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || field.equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE)) { + continue; + } + logger.trace("field: " + field); + // logger.trace("attrId: " + attrId); + if (innerNode != null) { + ((ObjectNode) innerNode.get(i)).replace(field, jsonToUpdate.get(field)); + logger.trace("appended json fields (partial): " + updateResult.getAppendedJsonFields().toString()); + updateResult.setStatus(true); + } + } + } +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/StorageWriterDAO.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/StorageWriterDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..dfe5f2a6a7ce9e4d4aad75a3b79ae00ced575b14 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/StorageWriterDAO.java @@ -0,0 +1,170 @@ +package eu.neclab.ngsildbroker.entityhandler.services; + +import java.sql.SQLException; +import java.sql.SQLTransientConnectionException; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import 
org.springframework.dao.DataAccessException; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.stereotype.Repository; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +import com.google.gson.Gson; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.datatypes.TemporalEntityStorageKey; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; + +@Repository("emstorage") +@ConditionalOnProperty(value = "writer.enabled", havingValue = "true", matchIfMissing = false) +public class StorageWriterDAO { + + private final static Logger logger = LogManager.getLogger(StorageWriterDAO.class); +// public static final Gson GSON = DataSerializer.GSON; + + @Autowired + private JdbcTemplate writerJdbcTemplate; + + @Autowired + private DataSource writerDataSource; + + private TransactionTemplate writerTransactionTemplate; + private JdbcTemplate writerJdbcTemplateWithTransaction; + + @PostConstruct + public void init() { + writerJdbcTemplate.execute("SELECT 1"); // create connection pool and connect to database + + // https://gist.github.com/mdellabitta/1444003 + DataSourceTransactionManager transactionManager = new DataSourceTransactionManager(writerDataSource); + writerJdbcTemplateWithTransaction = new JdbcTemplate(transactionManager.getDataSource()); + writerTransactionTemplate = new TransactionTemplate(transactionManager); + } + + public boolean store(String tableName, String columnName, String key, String value) { + try { + String sql; + int n = 0; + if (value != null && !value.equals("null")) { + sql = "INSERT INTO " + tableName + " (id, " + columnName + + ") VALUES (?, ?::jsonb) ON CONFLICT(id) DO UPDATE SET " + columnName + " = EXCLUDED." + + columnName; + n = writerJdbcTemplate.update(sql, key, value); + } else { + sql = "DELETE FROM " + tableName + " WHERE id = ?"; + n = writerJdbcTemplate.update(sql, key); + } + logger.trace("Rows affected: " + Integer.toString(n)); + return true; // (n>0); + } catch (Exception e) { + logger.error("Exception ::", e); + e.printStackTrace(); + } + return false; + } + + public boolean storeEntity(String key, String value, String valueWithoutSysAttrs, String kvValue) + throws SQLTransientConnectionException { + String sql; + int n = 0; + if (value != null && !value.equals("null")) { + sql = "INSERT INTO " + DBConstants.DBTABLE_ENTITY + " (id, " + DBConstants.DBCOLUMN_DATA + ", " + + DBConstants.DBCOLUMN_DATA_WITHOUT_SYSATTRS + ", " + DBConstants.DBCOLUMN_KVDATA + + ") VALUES (?, ?::jsonb, ?::jsonb, ?::jsonb) ON CONFLICT(id) DO UPDATE SET (" + + DBConstants.DBCOLUMN_DATA + ", " + DBConstants.DBCOLUMN_DATA_WITHOUT_SYSATTRS + ", " + + DBConstants.DBCOLUMN_KVDATA + ") = (EXCLUDED." + DBConstants.DBCOLUMN_DATA + ", EXCLUDED." + + DBConstants.DBCOLUMN_DATA_WITHOUT_SYSATTRS + ", EXCLUDED." 
+ DBConstants.DBCOLUMN_KVDATA + ")"; + n = writerJdbcTemplate.update(sql, key, value, valueWithoutSysAttrs, kvValue); + } else { + sql = "DELETE FROM " + DBConstants.DBTABLE_ENTITY + " WHERE id = ?"; + n = writerJdbcTemplate.update(sql, key); + } + logger.trace("Rows affected: " + Integer.toString(n)); + return true; // (n>0); + + } + + public boolean storeTemporalEntity(String key, String value) throws SQLException { + try { + + TemporalEntityStorageKey tesk = DataSerializer.getTemporalEntityStorageKey(key); + + String entityId = tesk.getEntityId(); + String entityType = tesk.getEntityType(); + String entityCreatedAt = tesk.getEntityCreatedAt(); + String entityModifiedAt = tesk.getEntityModifiedAt(); + + String attributeId = tesk.getAttributeId(); + String instanceId = tesk.getInstanceId(); + Boolean overwriteOp = tesk.getOverwriteOp(); + + Integer n = 0; + + if (!value.equals("null")) { + // https://gist.github.com/mdellabitta/1444003 + n = writerTransactionTemplate.execute(new TransactionCallback() { + @Override + public Integer doInTransaction(TransactionStatus status) { + String sql; + Integer tn = 0; + if (entityId != null && entityType != null && entityCreatedAt != null + && entityModifiedAt != null) { + sql = "INSERT INTO " + DBConstants.DBTABLE_TEMPORALENTITY + + " (id, type, createdat, modifiedat) VALUES (?, ?, ?::timestamp, ?::timestamp) ON CONFLICT(id) DO UPDATE SET type = EXCLUDED.type, createdat = EXCLUDED.createdat, modifiedat = EXCLUDED.modifiedat"; + tn = writerJdbcTemplateWithTransaction.update(sql, entityId, entityType, entityCreatedAt, + entityModifiedAt); + } + + if (entityId != null && attributeId != null) { + if (overwriteOp != null && overwriteOp) { + sql = "DELETE FROM " + DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE + + " WHERE temporalentity_id = ? AND attributeid = ?"; + tn += writerJdbcTemplateWithTransaction.update(sql, entityId, attributeId); + } + sql = "INSERT INTO " + DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE + + " (temporalentity_id, attributeid, data) VALUES (?, ?, ?::jsonb) ON CONFLICT(temporalentity_id, attributeid, instanceid) DO UPDATE SET data = EXCLUDED.data"; + tn += writerJdbcTemplateWithTransaction.update(sql, entityId, attributeId, value); + // update modifiedat field in temporalentity + sql = "UPDATE " + DBConstants.DBTABLE_TEMPORALENTITY + + " SET modifiedat = ?::timestamp WHERE id = ?"; + tn += writerJdbcTemplateWithTransaction.update(sql, entityModifiedAt, entityId); + } + return tn; + + } + }); + } else { + String sql; + if (entityId != null && attributeId != null && instanceId != null) { + sql = "DELETE FROM " + DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE + + " WHERE temporalentity_id = ? AND attributeid = ? AND instanceid = ?"; + n = writerJdbcTemplate.update(sql, entityId, attributeId, instanceId); + } else if (entityId != null && attributeId != null) { + sql = "DELETE FROM " + DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE + + " WHERE temporalentity_id = ? 
AND attributeid = ?"; + n = writerJdbcTemplate.update(sql, entityId, attributeId); + } else if (entityId != null) { + sql = "DELETE FROM " + DBConstants.DBTABLE_TEMPORALENTITY + " WHERE id = ?"; + n = writerJdbcTemplate.update(sql, entityId); + } + } + + logger.debug("Rows affected: " + Integer.toString(n)); + return true; + } catch (Exception e) { + logger.error("Exception ::", e); + e.printStackTrace(); + } + return false; + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/IdValidationRule.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/IdValidationRule.java new file mode 100644 index 0000000000000000000000000000000000000000..47bd9f3160004b354056c39b11a1f237d6bbe48b --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/IdValidationRule.java @@ -0,0 +1,35 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import java.net.URI; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.stereotype.Component; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +@Component +public class IdValidationRule implements ValidationRules { + + @Override + public boolean validateEntity(Entity entity, HttpServletRequest request) throws ResponseException { + isValidURI(entity.getId().toString()); + return true; + } + + //check for whether id is a valid URI + private boolean isValidURI(String urlString) throws ResponseException { + try { + new URI(urlString); + if(!urlString.contains(":")) { + throw new ResponseException(ErrorType.BadRequestData,"id is not a URI"); + } + return true; + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData,"id is not a URI"); + } + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/PayloadValidationRule.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/PayloadValidationRule.java new file mode 100644 index 0000000000000000000000000000000000000000..5deaadcf57e807cd69be0e13b72ccf3232c3f005 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/PayloadValidationRule.java @@ -0,0 +1,83 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import java.io.IOException; +import java.util.Iterator; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.stereotype.Component; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonNode; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +@Component +public class PayloadValidationRule implements ValidationRules{ + + /** + * expecting a unresolved raw payload + */ + public boolean validateEntity(String payload,HttpServletRequest request) throws ResponseException { + if (payload == null) { + throw new ResponseException(ErrorType.BadRequestData,"Empty request payload"); + } + JsonNode json = null; + try { + json = 
objectMapper.readTree(payload); + if (json.isNull()) { + throw new ResponseException(ErrorType.UnprocessableEntity); + } + isValidContentType(json,request.getContentType()); + isAttrsContainsForbiddenCharacters(json); + } catch (JsonParseException e) { + throw new ResponseException(ErrorType.BadRequestData); + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + return true; + } + + //check for forbidden characters in payload + private boolean isAttrsContainsForbiddenCharacters(JsonNode payload) throws ResponseException{ + //TODO : need to generate regex for all forbidden characters in NGB + Pattern p = Pattern.compile(".[<\"'=;()>?*]", Pattern.CASE_INSENSITIVE); + Iterator fieldNames=payload.fieldNames(); + while(fieldNames.hasNext()) { + Matcher m = p.matcher(fieldNames.next()); + if(m.find()) { + throw new ResponseException(ErrorType.BadRequestData,"Forbidden characters in payload body"); + } + } + return true; + } + + //check for json/@context OR json-LD/link validation in payload + private boolean isValidContentType(JsonNode payload,String contentType) throws ResponseException { + if(contentType.equalsIgnoreCase(AppConstants.NGB_APPLICATION_JSON)) { + if(payload.get("@context")!=null) { + throw new ResponseException(ErrorType.BadRequestData,"@context is provided in a JSON payload"); + } + }else if(contentType.equalsIgnoreCase(AppConstants.NGB_APPLICATION_JSONLD)) { + if(payload.get("@context")==null) { + throw new ResponseException(ErrorType.BadRequestData,"No @context is provided in a JSON-LD payload"); + } + //link header validation + }else { + throw new ResponseException(ErrorType.UnsupportedMediaType,"Media Type ( "+contentType+") is not supported in NGB"); + } + return true; + } + + @Override + public boolean validateEntity(Entity entity, HttpServletRequest request) throws ResponseException { + return true; + } + + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/PropertyValidatioRule.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/PropertyValidatioRule.java new file mode 100644 index 0000000000000000000000000000000000000000..e298725ec1bc6aee6f4acc0fb95a619a02e492c9 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/PropertyValidatioRule.java @@ -0,0 +1,39 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.stereotype.Component; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Property; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +@Component +public class PropertyValidatioRule implements ValidationRules { + + + @Override + public boolean validateEntity(Entity entity, HttpServletRequest request) throws ResponseException { + isNullProperty(entity); + return true; + } + + private boolean isNullProperty(Entity entity) throws ResponseException { + for(Property property:entity.getProperties()) { + if(property.getEntries()==null) { + throw new ResponseException(ErrorType.BadRequestData,"Entity with a property value equal to null"); + } + } + return true; + } + + @SuppressWarnings("unused") + //TODO use or remove + private boolean isEmptyProperty(Entity entity) { + return true; + } + + + +} diff --git 
a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/RelationshipValidationRule.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/RelationshipValidationRule.java new file mode 100644 index 0000000000000000000000000000000000000000..2fb9bc1acc56809432619858ecd25c49bbf0e19d --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/RelationshipValidationRule.java @@ -0,0 +1,35 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.stereotype.Component; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Relationship; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +@Component +public class RelationshipValidationRule implements ValidationRules{ + + @Override + public boolean validateEntity(Entity entity,HttpServletRequest request) throws ResponseException { + isNullObject(entity); + isEmptyObject(entity); + return true; + } + + private boolean isNullObject(Entity entity) throws ResponseException { + for(Relationship relation:entity.getRelationships()) { + if(relation.getEntries()==null) { + throw new ResponseException(ErrorType.BadRequestData,"Entity with a Relationship object equal to null"); + } + } + return true; + } + + private boolean isEmptyObject(Entity entity) { + return true; + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/TypeValidationRule.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/TypeValidationRule.java new file mode 100644 index 0000000000000000000000000000000000000000..3867f550b91475706d0e41bd86782c2f4a48cd40 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/TypeValidationRule.java @@ -0,0 +1,37 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import java.net.URI; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.stereotype.Component; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +@Component +public class TypeValidationRule implements ValidationRules{ + //TODO log or remove +// private final static Logger logger = LogManager.getLogger(TypeValidationRule.class); + + //should reject an entity which node type is not recognized OR null + @Override + public boolean validateEntity(Entity entity,HttpServletRequest request) throws ResponseException { + isTypeRecognized(entity.getType()); + return true; + } + + private boolean isTypeRecognized(String type) throws ResponseException{ + try { + new URI(type); + if(!type.contains(":")) { + throw new ResponseException(ErrorType.BadRequestData,"@type is not recognized"); + } + return true; + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData,"@type is not recognized"); + } + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationRules.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationRules.java new file 
mode 100644 index 0000000000000000000000000000000000000000..c6578f14b48f0be0b62cdef8be6ce90a0d61e9cb --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationRules.java @@ -0,0 +1,13 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import javax.servlet.http.HttpServletRequest; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +public interface ValidationRules { + static final ObjectMapper objectMapper = new ObjectMapper(); + public boolean validateEntity(Entity entity,HttpServletRequest request) throws ResponseException; +} diff --git a/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/Validator.java b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/Validator.java new file mode 100644 index 0000000000000000000000000000000000000000..101aab5f1781763506a0499948c1b1eede6cafb0 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/validationutil/Validator.java @@ -0,0 +1,29 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import java.util.HashSet; +import java.util.Map; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +public class Validator { + + private static HashSet validParams = new HashSet(); + static { + validParams.add(NGSIConstants.QUERY_PARAMETER_DATA_SET_ID); + validParams.add(NGSIConstants.QUERY_PARAMETER_DELETE_ALL); + } + + /** + * Validate the query parameter and call this method from EntityController class. 
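+	 * Example (illustrative): calling Validator.validate(request.getParameterMap()) from a controller
+	 * rejects any query parameter outside the allowed set (dataset id, delete-all) with a
+	 * BadRequestData ResponseException.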
+ * @param parameterMap + * @throws ResponseException + */ + public static void validate(Map parameterMap) throws ResponseException { + for (String key : parameterMap.keySet()) { + if (!validParams.contains(key)) { + throw new ResponseException(ErrorType.BadRequestData, key + " is not valid parameter"); + } + } + } +} diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/application-aaio.yml b/scorpio-broker/Core/EntityManager/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..1c04e007a9552057a234c59ec4e6e41ee0cb2797 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/application-aaio.yml @@ -0,0 +1,15 @@ +server: + port: 1025 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 +bootstrap: + servers: kafka:9092 +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/application-aio.yml b/scorpio-broker/Core/EntityManager/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..62b2b28f82c5ac071b8464ad03c713675d76867b --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/application-aio.yml @@ -0,0 +1,15 @@ +server: + port: 1025 + +spring: + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 +bootstrap: + servers: localhost:9092 +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/application-dist.yml b/scorpio-broker/Core/EntityManager/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..20d09ea3c72f7625b4e60765ceebd977594fd008 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/application-dist.yml @@ -0,0 +1,56 @@ +server: + port: 1025 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + flyway: + baselineOnMigrate: true + +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_entitymanager_writer + username: ngb + enabled: true +reader: + enabled: true + datasource: + url: "jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_entitymanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 + +bootstrap: + servers: kafka:9092 +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/application-test.yml b/scorpio-broker/Core/EntityManager/src/main/resources/application-test.yml new file mode 100644 index 0000000000000000000000000000000000000000..a15d432f7aa00b7720cdea8b67c417bc3ac60a96 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/application-test.yml @@ -0,0 +1,61 @@ +spring: + application: + name: entity-manager + 
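+  # (test profile) the Kafka settings below target an embedded broker; bootstrap.servers is resolved
+  # from the spring.embedded.kafka.brokers property that spring-kafka-test publishes at runtime.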
kafka: + consumer: + group-id: ngb + auto-offset-reset: earliest + admin: + properties: + cleanup: + policy: compact + +server: + port: 1025 + tomcat: + max: + threads: 50 + + +#Entity-Manager topics properties +entity: + topic: ENTITY + create: + topic: ENTITY_CREATE + append: + topic: ENTITY_APPEND + update: + topic: ENTITY_UPDATE + delete: + topic: ENTITY_DELETE + index: + topic: ENTITY_INDEX + +csources: + registration: + topic: CONTEXT_REGISTRY + + +#kafka broker path +bootstrap: + servers: ${spring.embedded.kafka.brokers} + + + + +#Entity Append ovewrite options +append: + overwrite: noOverwrite +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/application.yml b/scorpio-broker/Core/EntityManager/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..c34f427912f33c48be83ad6e0bbe2a82aa11052f --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/application.yml @@ -0,0 +1,162 @@ + + +server: + port: 1025 + tomcat: + max: + threads: 50 +#Entity-Manager topics properties +entity: + topic: ENTITY + create: + topic: ENTITY_CREATE + append: + topic: ENTITY_APPEND + update: + topic: ENTITY_UPDATE + delete: + topic: ENTITY_DELETE + index: + topic: ENTITY_INDEX + +csources: + registration: + topic: CONTEXT_REGISTRY +#kafka broker path +bootstrap: + servers: localhost:9092 + +batchoperations: + maxnumber: + create: 1000 + update: 1000 + upsert: 1000 + delete: 1000 + + + +#Entity Append ovewrite options +append: + overwrite: noOverwrite + + + +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + + + +spring: + application: + name: entity-manager + kafka: + admin: + properties: + cleanup: + policy: compact + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ENTITY_WRITE_CHANNEL: + destination: ENTITY + contentType: application/json +# ENTITY_READ_CHANNEL: +# destination: ENTITY +# contentType: application/json +# group: entity + KVENTITY_WRITE_CHANNEL: + destination: KVENTITY + contentType: application/json + ENTITY_WITHOUT_SYSATTRS_WRITE_CHANNEL: + destination: ENTITY_WITHOUT_SYSATTRS + contentType: application/json + CREATE_WRITE_CHANNEL: + destination: ENTITY_CREATE + contentType: application/json + #CREATE_READ_CHANNEL: + #destination: ENTITY_CREATE + #contentType: application/json + #group: create + APPEND_WRITE_CHANNEL: + destination: ENTITY_APPEND + contentType: application/json + #APPEND_READ_CHANNEL: + #destination: ENTITY_APPEND + #contentType: application/json + #group: append + UPDATE_WRITE_CHANNEL: + destination: ENTITY_UPDATE + contentType: application/json + #UPDATE_READ_CHANNEL: + #destination: ENTITY_UPDATE + #contentType: application/json + #group: update + DELETE_WRITE_CHANNEL: + destination: ENTITY_DELETE + contentType: application/json + #DELETE_READ_CHANNEL: + #destination: ENTITY_DELETE + #contentType: application/json + #group: delete + CONTEXT_REGISTRY_WRITE_CHANNEL: + destination: CONTEXT_REGISTRY + contentType: application/json + CONTEXT_REGISTRY_UPDATE_CHANNEL: + destination: CONTEXT_UPDATE + contentType: application/json + INDEX_WRITE_CHANNEL: + destination: ENTITY_INDEX + 
contentType: application/json + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + flyway: + baselineOnMigrate: true + +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_entitymanager_writer + username: ngb + enabled: true +reader: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_entitymanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 + \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.1__entity.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
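+        -- the assignments below copy the expanded NGSI-LD members (@type, createdAt, modifiedAt and the
+        -- geo properties) out of the JSONB payload into typed columns so they can be indexed and queried directly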
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.2__registry.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX 
i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + 
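+-- example of a lookup the extracted columns and indexes above are meant to serve, e.g.:
+--   SELECT id, endpoint FROM csource WHERE expires > now() AND internal = false;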
+CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.3__temporal.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> 
OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs 
JSONB; diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ 
b/scorpio-broker/Core/EntityManager/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
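+          -- e.g. an entities element carrying both an @id and an idPattern is stored with entity_id set and
+          -- entity_idpattern NULL, because the CASE WHEN below only falls back to idPattern when @id is absent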
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/main/resources/log4j2-spring.xml b/scorpio-broker/Core/EntityManager/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..36841e763d57f933816eff782607e791f2997e0a --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/EntityHandlerTest.java b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/EntityHandlerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..ed29436d2a8e554a42b9e14d09ab0e33cebe6640 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/EntityHandlerTest.java @@ -0,0 +1,14 @@ +package eu.neclab.ngsildbroker.entityhandler; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +public class EntityHandlerTest { + @Test + public void contextLoads() { + } +} diff --git a/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.java b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..d2a372a96ba7f7f3168769cba03f80ca8668e077 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.java @@ -0,0 +1,735 @@ +package eu.neclab.ngsildbroker.entityhandler.controller; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; +import static 
org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import org.junit.After; +import org.junit.Before; +import org.junit.Assert; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.AppendResult; +import eu.neclab.ngsildbroker.commons.datatypes.UpdateResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.entityhandler.services.EntityService; + + +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +@RunWith(SpringRunner.class) +@AutoConfigureMockMvc//(secure = false) +public class EntityControllerTest { + @Autowired + private MockMvc mockMvc; + @MockBean + private EntityService entityService; + + private String appendPayload; + private String updatePayload; + private String entityPayload; + private String partialUpdatePayload; + private String partialUpdateDefaultCasePayload; + + @Before + public void setup() throws Exception { + //@formatter:off + + appendPayload="{\r\n" + + " \"brandName1\": {\r\n" + + " \"type\": \"Property\",\r\n" + + " \"value\": \"BMW\"\r\n" + + " }\r\n" + + "}"; + + updatePayload="{\r\n" + + " \"brandName1\": {\r\n" + + " \"type\": \"Property\",\r\n" + + " \"value\": \"Audi\"\r\n" + + " }\r\n" + + "}"; + partialUpdatePayload= "{\r\n" + + " \"value\": 20,\r\n" + + " \"datasetId\": \"urn:ngsi-ld:Property:speedometerA4567-speed\"\r\n" + + "}"; + + partialUpdateDefaultCasePayload= "{\r\n" + + " \"value\": 11\r\n" + + "}"; + + entityPayload= "{ \r\n" + + " \"id\":\"urn:ngsi-ld:Vehicle:A101\",\r\n" + + " \"type\":\"Vehicle\",\r\n" + + " \"brandName\":\r\n" + + " { \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":\"Mercedes\"\r\n" + + " },\r\n" + + " \"speed\":[{ \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":55,\r\n" + + " \"datasetId\":\"urn:ngsi-ld:Property:speedometerA4567-speed\",\r\n" + + " \"source\":\r\n" + + " { \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":\"Speedometer\"\r\n" + + " }\r\n" + + " },\r\n" + + " { \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":60,\r\n" + + " \"source\":\r\n" + + " { \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":\"GPS\"\r\n" + + " }\r\n" + + " },\r\n" + + " { \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":52.5,\r\n" + + " \"source\":\r\n" + + " { \r\n" + + " \"type\":\"Property\",\r\n" + + " \"value\":\"GPS_NEW\"\r\n" + + " }\r\n" + + " }],\r\n" + + " \"createdAt\":\"2017-07-29T12:00:04Z\",\r\n" + + " \"modifiedAt\":\"2017-07-29T12:00:04Z\",\r\n" + + " \"location\":\r\n" + + " { \r\n" + + " \"type\":\"GeoProperty\",\r\n" + + " \"value\": \"{ \\\"type\\\": \\\"Point\\\", \\\"coordinates\\\": [ -8.5, 41.2]}\""+ + " }\r\n" + + "}"; + + //@formatter:on + } + + + @After + public void tearDown() { + appendPayload=null; + updatePayload=null; + entityPayload=null; + partialUpdatePayload=null; + partialUpdateDefaultCasePayload=null; + } + + /** + * this method is use for create the entity + */ + @Test + 
public void createEntityTest() { + try { + when(entityService.createMessage(any())).thenReturn("urn:ngsi-ld:Vehicle:A101"); + mockMvc.perform(post("/ngsi-ld/v1/entities/").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(entityPayload)).andExpect(status().isCreated()) + .andExpect(redirectedUrl("/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A101")); + verify(entityService, times(1)).createMessage(any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use for the entity if entity already exist + */ + @Test + public void createEntityAlreadyExistTest() { + try { + when(entityService.createMessage(any())).thenThrow(new ResponseException(ErrorType.AlreadyExists)); + mockMvc.perform(post("/ngsi-ld/v1/entities/").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(entityPayload)) + .andExpect(status().isConflict()).andExpect(jsonPath("$.title").value("Already exists.")); + verify(entityService, times(1)).createMessage(any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate for the bad request if create the entity + */ + @Test + public void createEntityBadRequestTest() { + try { + when(entityService.createMessage(any())).thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(post("/ngsi-ld/v1/entities/").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(entityPayload)) + .andExpect(status().isBadRequest()).andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).createMessage(any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is validate for throw the exception if create the entity + */ + @Test + public void createEntity500ExceptionTest() { + try { + when(entityService.createMessage(any())).thenThrow(new Exception()); + mockMvc.perform(post("/ngsi-ld/v1/entities/").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(entityPayload)) + .andExpect(status().isInternalServerError()).andExpect(jsonPath("$.title").value("Internal error")); + verify(entityService, times(1)).createMessage(any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is use for append the attribute in entity + */ + @Test + public void appendEntityNoContentTest() { + try { + AppendResult appendResult = Mockito.mock(AppendResult.class); + when(entityService.appendMessage(any(), any(), any())).thenReturn(appendResult); + when(appendResult.getAppendResult()).thenReturn(true); + mockMvc.perform(post("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(appendPayload)).andExpect(status().isNoContent()); + verify(entityService, times(1)).appendMessage(any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use for append the attribute in entity for multi status + */ + @Test + public void appendEntityMultiStatusTest() { + try { + AppendResult appendResult = Mockito.mock(AppendResult.class); + when(entityService.appendMessage(any(), any(), any())).thenReturn(appendResult); + mockMvc.perform(post("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + 
.contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(appendPayload)).andExpect(status().isMultiStatus()); + verify(entityService, times(1)).appendMessage(any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate the bad request if append the attribute in entity + */ + @Test + public void appendEntityBadRequestTest() { + try { + when(entityService.appendMessage(any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(post("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(appendPayload)).andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).appendMessage(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is validate the data not found if append the attribute in entity + */ + @Test + public void appendEntityNotFoundTest() { + try { + when(entityService.appendMessage(any(), any(), any())).thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(post("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(appendPayload)).andExpect(status().isNotFound()) + .andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).appendMessage(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is validate throw the exception if append the attribute in entity + */ + @Test + public void appendEntity500Test() { + try { + when(entityService.appendMessage(any(), any(), any())).thenThrow(new Exception()); + mockMvc.perform(post("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(appendPayload)).andExpect(status().isInternalServerError()) + .andExpect(jsonPath("$.title").value("Internal error")); + verify(entityService, times(1)).appendMessage(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is use for update the entity + */ + @Test + public void updateEntityNoContentTest() { + try { + UpdateResult updateResult = Mockito.mock(UpdateResult.class); + when(entityService.updateMessage(any(), any())).thenReturn(updateResult); + when(updateResult.getUpdateResult()).thenReturn(true); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(updatePayload)).andExpect(status().isNoContent()); + verify(entityService, times(1)).updateMessage(any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use for update the entity for multi status + */ + @Test + public void updateEntityMultiStatusTest() { + try { + UpdateResult updateResult = Mockito.mock(UpdateResult.class); + when(entityService.updateMessage(any(), any())).thenReturn(updateResult); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + 
.contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(updatePayload)).andExpect(status().isMultiStatus()); + verify(entityService, times(1)).updateMessage(any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate the bad request if entity update + */ + @Test + public void updateEntityBadRequestTest() { + try { + when(entityService.updateMessage(any(), any())).thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(updatePayload)).andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).updateMessage(any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate the data not found if entity update + */ + @Test + public void updateEntityNotFoundTest() { + try { + when(entityService.updateMessage(any(), any())).thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(updatePayload)).andExpect(status().isNotFound()) + .andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).updateMessage(any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is validate throw the exception if entity update + */ + @Test + public void updateEntity500Test() { + try { + when(entityService.updateMessage(any(), any())).thenThrow(new Exception()); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(updatePayload)).andExpect(status().isInternalServerError()) + .andExpect(jsonPath("$.title").value("Internal error")); + verify(entityService, times(1)).updateMessage(any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is use for partial update the attribute + */ + @Test + public void partialUpdateAttributeIfDatasetIdExistNoContentTest() { + try { + UpdateResult updateResult = Mockito.mock(UpdateResult.class); + when(entityService.partialUpdateEntity(any(), any(), any())).thenReturn(updateResult); + when(updateResult.getStatus()).thenReturn(true); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(partialUpdatePayload)).andExpect(status().isNoContent()); + verify(entityService, times(1)).partialUpdateEntity(any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use for partial update the attribute if datasetId is not exist + */ + @Test + public void partialUpdateAttributeIfDatasetIdIsNotExistTest() { + try { + when(entityService.partialUpdateEntity(any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + 
.contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(partialUpdatePayload)).andExpect(status().isNotFound()) + .andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).partialUpdateEntity(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is validate the bad request in partial update attribute + */ + @Test + public void partialUpdateAttributeBadRequestTest() { + try { + when(entityService.partialUpdateEntity(any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(updatePayload)).andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).partialUpdateEntity(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is use for partial update attribute default datasetId + */ + @Test + public void partialUpdateAttributeDefaultDatasetIdCaseTest() { + try { + UpdateResult updateResult = Mockito.mock(UpdateResult.class); + when(entityService.partialUpdateEntity(any(), any(), any())).thenReturn(updateResult); + when(updateResult.getStatus()).thenReturn(true); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(partialUpdateDefaultCasePayload)).andExpect(status().isNoContent()); + verify(entityService, times(1)).partialUpdateEntity(any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate for the not found in partial update attribute default datasetId + */ + @Test + public void partialUpdateAttributeDefaultDatasetIdCaseNotFoundTest() { + try { + when(entityService.partialUpdateEntity(any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(partialUpdateDefaultCasePayload)).andExpect(status().isNotFound()) + .andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).partialUpdateEntity(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is validate throw the exception in case of partial update attribute + */ + @Test + public void partialUpdateAttribute500Test() { + try { + when(entityService.partialUpdateEntity(any(), any(), any())).thenThrow(new Exception()); + mockMvc.perform(patch("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(partialUpdateDefaultCasePayload)).andExpect(status().isInternalServerError()) + .andExpect(jsonPath("$.title").value("Internal error")); + verify(entityService, times(1)).partialUpdateEntity(any(), any(), any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(); + } + } + + /** + * this method is use for delete the 
entity + */ + @Test + public void deleteEntityTest() { + try { + when(entityService.deleteEntity(any())).thenReturn(true); + mockMvc.perform(delete("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isNoContent()); + verify(entityService, times(1)).deleteEntity(any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate the not found if delete the entity + */ + @Test + public void deleteEntityNotFoundTest() { + try { + when(entityService.deleteEntity(any())).thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(delete("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isNotFound()) + .andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).deleteEntity(any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate the bad request if delete the entity + */ + @Test + public void deleteEntityBadRequestTest() { + try { + when(entityService.deleteEntity(any())).thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(delete("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).deleteEntity(any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate throw exception if delete the entity + */ + @Test + public void deleteEntity500Test() { + try { + when(entityService.deleteEntity(any())).thenThrow(new Exception()); + mockMvc.perform(delete("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A101") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isInternalServerError()) + .andExpect(jsonPath("$.title").value("Internal error")); + verify(entityService, times(1)).deleteEntity(any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate for the datasetId is not exist in case of delete attribute instance + */ + @Test + public void deleteAttributeInstanceIfDatasetIdExistTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())).thenReturn(true); + mockMvc.perform(delete( + "/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}?datasetId=urn:ngsi-ld:Property:speedometerA4567-speed", + "urn:ngsi-ld:Vehicle:A101", "speed").contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNoContent()); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate for the datasetId is not exist in case of delete attribute instance + */ + @Test + public void deleteAttributeInstanceIfDatasetIdNotExistTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(delete( + "/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}?datasetId=urn:ngsi-ld:Property:speedometerA4567-speed", + "urn:ngsi-ld:Vehicle:A101", "speed").contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNotFound()).andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception 
e) { + Assert.fail(); + } + } + + /** + * this method is validate for bad request in case of delete attribute + */ + @Test + public void deleteAttributeInstanceBadRequestTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(delete( + "/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}?datasetId=urn:ngsi-ld:Property:speedometerA4567-speed", + "urn:ngsi-ld:Vehicle:A101", "speed").contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate default instance exist in case of delete attribute + */ + @Test + public void deleteAttributeDefaultInstanceIfExistTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())).thenReturn(true); + mockMvc.perform( + delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNoContent()); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate default instance not exist in case of delete attribute + */ + @Test + public void deleteAttributeDefaultInstanceNotExistTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform( + delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNotFound()).andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate not found in case of delete attribute + */ + @Test + public void deleteAttributeNotFoundTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform( + delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNotFound()).andExpect(jsonPath("$.title").value("Resource not found.")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is use for all the delete attribute + */ + @Test + public void deleteAllAttributeInstanceTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())).thenReturn(true); + mockMvc.perform(delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}?deleteAll=true", + "urn:ngsi-ld:Vehicle:A101", "speed").contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNoContent()); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is validate the bad request in case of delete all attribute + */ + @Test + public void deleteAllAttributeInstanceBadRequestTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())) + .thenThrow(new 
ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}?deleteAll=true", + "urn:ngsi-ld:Vehicle:A101", "speed").contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isBadRequest()).andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate for bad request in case of delete attribute + */ + @Test + public void deleteAttributeBadRequestTest() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())) + .thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform( + delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isBadRequest()).andExpect(jsonPath("$.title").value("Bad Request Data.")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is validate throw the exception in case of delete attribute + */ + @Test + public void deleteAttribute500Test() { + try { + when(entityService.deleteAttribute(any(), any(), any(), any())).thenThrow(new Exception()); + mockMvc.perform( + delete("/ngsi-ld/v1/entities/{entityId}/attrs/{attrId}", "urn:ngsi-ld:Vehicle:A101", "speed") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isInternalServerError()).andExpect(jsonPath("$.title").value("Internal error")); + verify(entityService, times(1)).deleteAttribute(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(); + } + } + +} diff --git a/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.java b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..a18ec2e34be4a3a061ab846e3993dc7df14bd19f --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.java @@ -0,0 +1,363 @@ +package eu.neclab.ngsildbroker.entityhandler.services; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.modules.junit4.PowerMockRunnerDelegate; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.util.ReflectionTestUtils; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.neclab.ngsildbroker.commons.datatypes.AppendResult; +import eu.neclab.ngsildbroker.commons.datatypes.EntityDetails; +import eu.neclab.ngsildbroker.commons.datatypes.UpdateResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; 
+import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.entityhandler.config.EntityProducerChannel; +import eu.neclab.ngsildbroker.entityhandler.config.EntityTopicMap; + +//@RunWith(SpringRunner.class) +//@SpringBootTest +//Set powemock runner +@RunWith(PowerMockRunner.class) +// Delegate to Spring +@PowerMockRunnerDelegate(SpringRunner.class) +@PowerMockIgnore({ "javax.management.*" }) +public class EntityServiceTest { + + @Mock + KafkaOps operations; + @MockBean + private ObjectMapper objectMapper; + @MockBean + private EntityTopicMap entityTopicMap; + @Mock + EntityProducerChannel entityProducerChannel; + @InjectMocks + @Spy + private EntityService entityService; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + String updatePayload; + String appendPayload; + String entityPayload; + String updatePartialAttributesPayload; + String updatePartialDefaultAttributesPayload; + JsonNode updateJsonNode; + JsonNode appendJsonNode; + JsonNode blankNode; + JsonNode payloadNode; + JsonNode updatePartialAttributesNode; + JsonNode updatePartialDefaultAttributesNode; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + ReflectionTestUtils.setField(entityService, "appendOverwriteFlag", "noOverwrite"); + ObjectMapper objectMapper=new ObjectMapper(); + + //@formatter:off + + entityPayload="{\r\n" + + " \"http://example.org/vehicle/brandName\": [\r\n" + + " {\r\n" + + " \"@type\":[\r\n" + + " \"https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"Mercedes\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/createdAt\": [\r\n" + + " {\r\n" + + " \"@type\": \"https://uri.etsi.org/ngsi-ld/DateTime\",\r\n" + + " \"@value\": \"2018-06-01T12:03:00Z\"\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@id\": \"urn:ngsi-ld:Vehicle:A103\",\r\n" + + " \"https://uri.etsi.org/ngsi-ld/modifiedAt\":[{\r\n" + + " \"@value\": \"2017-07-29T12:00:04Z\",\r\n" + + " \"@type\": \"https://uri.etsi.org/ngsi-ld/DateTime\"}],\r\n" + + " \"http://example.org/vehicle/speed\": [\r\n" + + " {\r\n" + + " \"https://uri.etsi.org/ngsi-ld/datasetId\": [\r\n" + + " {\r\n" + + " \"@id\": \"urn:ngsi-ld:Property:speedometerA4567-speed\"\r\n" + + " }\r\n" + + " ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/default-context/source\":[\r\n" + + " {\r\n" + + " \"@type\":[\r\n" + + " \"https://https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\":[\r\n" + + " {\r\n" + + " \"@value\": \"Speedometer\"\r\n" + + " }\r\n" + + " ]\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@type\":[\r\n" + + " \"https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\":[\r\n" + + " {\r\n" + + " \"value\":55\r\n" + + " }\r\n" + + " ]\r\n" + + " },\r\n" + + " {\r\n" + + " \"https://uri.etsi.org/ngsi-ld/default-context/source\":[\r\n" + + " {\r\n" + + " \"@type\":[\r\n" + + " \"https://https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\":[\r\n" + + " {\r\n" + + " \"@value\": \"GPS\"\r\n" + + " }\r\n" + + " ]\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@type\":[\r\n" + + " \"https://https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\":[\r\n" + + " {\r\n" + + " \"value\":10\r\n" + + " }\r\n" + + " ]\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@type\":[\r\n" + + " 
\"http://example.org/vehicle/Vehicle\"]\r\n" + + " }\r\n"; + + updatePayload="{\r\n" + + " \"http://example.org/vehicle/brandName\": [{\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"AUDI\"\r\n" + + " }]\r\n" + + " }]\r\n" + + "}"; + updatePartialAttributesPayload="{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/datasetId\": [{\r\n" + + " \"@id\": \"urn:ngsi-ld:Property:speedometerA4567-speed\" \r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"20\"\r\n" + + " }]\r\n" + + "}"; + updatePartialDefaultAttributesPayload="{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"20\"\r\n" + + " }]\r\n" + + "}"; + + appendPayload="{\r\n" + + " \"http://example.org/vehicle/brandName1\": [{\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"BMW\"\r\n" + + " }]\r\n" + + " }]\r\n" + + "}"; + + //@formatter:on + + updateJsonNode=objectMapper.readTree(updatePayload); + appendJsonNode=objectMapper.readTree(appendPayload); + blankNode=objectMapper.createObjectNode(); + payloadNode=objectMapper.readTree(entityPayload); + updatePartialAttributesNode=objectMapper.readTree(updatePartialAttributesPayload); + updatePartialDefaultAttributesNode=objectMapper.readTree(updatePartialDefaultAttributesPayload); + } + + @After + public void tearDown() { + updatePayload = null; + appendPayload=null; + entityPayload=null; + updatePartialAttributesPayload=null; + updatePartialDefaultAttributesPayload=null; + } + + /** + * this method is use for create the entity + */ + @Test + public void createMessageTest(){ + try { + JsonNode jsonNode = Mockito.mock(JsonNode.class); + Mockito.doReturn(jsonNode).when(objectMapper).readTree(entityPayload); + Mockito.doReturn(entityPayload).when(objectMapper).writeValueAsString(any()); + Mockito.doReturn(jsonNode).when(jsonNode).get(any()); + Mockito.doReturn(false).when(entityTopicMap).isExist(any()); + Mockito.doReturn("urn:ngsi-ld:Vehicle:A103").when(jsonNode).asText(); + Mockito.doReturn(true).when(entityService).registerContext(any(), any()); + Mockito.doReturn(true).when(operations).pushToKafka(any(), any(), any()); + Mockito.doReturn(jsonNode).when(entityService).getKeyValueEntity(jsonNode); + + String id = entityService.createMessage(entityPayload); + Assert.assertEquals(id, "urn:ngsi-ld:Vehicle:A103"); + verify(entityService, times(1)).getKeyValueEntity(any()); + }catch( Exception ex) { + Assert.fail(); + } + + } + + /** + * this method is validate the entity if already exist + * @throws ResponseException + * @throws Exception + */ + @Test + public void createMessageThrowsAlreadyExistTest() throws ResponseException, Exception { + thrown.expect(ResponseException.class); + thrown.expectMessage("Already exists."); + JsonNode jsonNode = Mockito.mock(JsonNode.class); + Mockito.doReturn(jsonNode).when(objectMapper).readTree(entityPayload); + Mockito.doReturn(jsonNode).when(jsonNode).get(any()); + Mockito.doReturn(true).when(entityTopicMap).isExist(any()); + Mockito.doReturn("urn:ngsi-ld:Vehicle:A103").when(jsonNode).asText(); + Mockito.doReturn(true).when(entityService).registerContext(any(), any()); + Mockito.doReturn(true).when(operations).pushToKafka(any(), any(), any()); + Mockito.doThrow(new ResponseException(ErrorType.AlreadyExists)).when(entityService).createMessage(any()); + 
entityService.createMessage(entityPayload); + verify(entityTopicMap, times(1)).isExist(any()); + + } + + /** + * this method is use for update the entity + * @throws Exception + */ + @Test + public void updateMessageTest() throws Exception{ + + EntityDetails entityDetails=Mockito.mock(EntityDetails.class); + byte[] messageByte=entityPayload.getBytes(); + JsonNode resultJson = objectMapper.createObjectNode(); + UpdateResult updateResult = new UpdateResult(updateJsonNode, resultJson); + updateResult.setStatus(true); + + Mockito.doReturn(entityDetails).when(entityTopicMap).get(any()); + Mockito.doReturn(messageByte).when(operations).getMessage(any(), any(), any(Integer.class), any(Long.class)); + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + Mockito.doReturn(blankNode).when(objectMapper).createObjectNode(); +// Mockito.doReturn(updateResult).when(entityService).updateFields(messageByte, updateJsonNode, null); + //TODO no assert. no usage of result + Mockito.doReturn(updateResult).when(entityService).updateMessage(any(), any()); + entityService.updateMessage("urn:ngsi-ld:Vehicle:A103", updatePayload); + verify(entityService, times(1)).updateMessage(any(),any()); + + + } + + /** + * this method is use for append the field or attribute in entity + */ + @Test + public void appendFieldTest(){ + try { + Mockito.doReturn(blankNode).when(objectMapper).createObjectNode(); + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + + AppendResult appendResult=entityService.appendFields(entityPayload, appendJsonNode, " "); + + Assert.assertTrue(appendResult.getStatus()); + }catch(Exception ex) { + Assert.fail(); + } + } + + /** + * this method is use for the update attribute field + */ + @Test + public void updateAttributeFieldTest() { + try { + Mockito.doReturn(blankNode).when(objectMapper).createObjectNode(); + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + + + UpdateResult updateResult=entityService.updateFields(entityPayload,updateJsonNode , null); + Assert.assertTrue(updateResult.getStatus()); + Assert.assertEquals(updateJsonNode, updateResult.getJsonToAppend()); + }catch (Exception ex) { + Assert.fail(); + } + } + + /** + * this method is use for the update partial attribute field + */ + @Test + public void updatePartialAttributeFieldTest() { + try { + Mockito.doReturn(blankNode).when(objectMapper).createObjectNode(); + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + AppendResult appendResult=entityService.appendFields(entityPayload, updateJsonNode, " "); + Assert.assertTrue(appendResult.getStatus()); + }catch(Exception ex) { + Assert.fail(); + } + } + + + public void updatePartialDefaultAttributeFieldTest() { + try { + Mockito.doReturn(blankNode).when(objectMapper).createObjectNode(); + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + UpdateResult updateResult=entityService.updateFields(entityPayload,updatePartialDefaultAttributesNode , "http://example.org/vehicle/speed"); + Assert.assertTrue(updateResult.getStatus()); + Assert.assertEquals(updatePartialDefaultAttributesNode, updateResult.getJsonToAppend()); + }catch (Exception ex) { + Assert.fail(); + } + } + + + /** + * this method is use for the datasetId is exist in case of delete the attribute instance + */ + @Test + public void deleteAttributeInstanceIfDatasetIdExistTest() { + try { + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + 
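+			// The call below exercises deleteFields with (payload, attributeId, datasetId, deleteAll);
+			// judging from this test and deleteAllAttributeInstanceTest further down, passing a datasetId
+			// with a null deleteAll flag should remove only the matching attribute instance (an inference
+			// from the test fixtures, not a statement of the service contract).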
entityService.deleteFields(entityPayload, "http://example.org/vehicle/speed","urn:ngsi-ld:Property:speedometerA4567-speed",null); + } catch (Exception ex) { + Assert.fail(); + } + } + + /** + * this method is use for all delete the attribute + */ + @Test + public void deleteAllAttributeInstanceTest() { + try { + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(String.class)); + entityService.deleteFields(entityPayload, "http://example.org/vehicle/speed",null,"true"); + } catch (Exception ex) { + Assert.fail(); + } + } +} diff --git a/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationUtilTest.java b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationUtilTest.java new file mode 100644 index 0000000000000000000000000000000000000000..ac35bfbd8eaeabb240fee3879e06c6c0dcd4d719 --- /dev/null +++ b/scorpio-broker/Core/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationUtilTest.java @@ -0,0 +1,194 @@ +package eu.neclab.ngsildbroker.entityhandler.validationutil; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Relationship; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; + + +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +@RunWith(SpringRunner.class) +public class ValidationUtilTest { + + TypeValidationRule typeValidation; + PropertyValidatioRule propertyValidation; + RelationshipValidationRule relationshipValidation; + IdValidationRule idValidation; + Entity entity; + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Before + public void setup() { + typeValidation=new TypeValidationRule(); + propertyValidation=new PropertyValidatioRule(); + relationshipValidation=new RelationshipValidationRule(); + idValidation=new IdValidationRule(); + + String payload=" {\r\n" + + " \"http://example.org/vehicle/brandName\": [\r\n" + + " {\r\n" + + " \"@type\": [\r\n" + + " \"https://uri.etsi.org/ngsi-ld/Property\"\r\n" + + " ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [\r\n" + + " {\r\n" + + + " \"@value\": \"Volvo\"\r\n" + + " }]\r\n" + + + " }\r\n" + + " ],\r\n" + + " \"@id\": \"urn:ngsi-ld:Vehicle:B9211\",\r\n" + + " \"http://example.org/vehicle/speed\": [\r\n" + + " {\r\n" + + " \"https://uri.etsi.org/ngsi-ld/instanceId\": [\r\n" + + " {\r\n" + + " \"@value\": \"be664aaf-a7af-4a99-bebc-e89528238abf\"\r\n" + + " }\r\n" + + " ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/observedAt\": [\r\n" + + " {\r\n" + + " \"@type\": \"https://uri.etsi.org/ngsi-ld/DateTime\",\r\n" + + " \"@value\": \"2018-06-01T12:03:00Z\"\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@type\": [\r\n" + + " \"https://uri.etsi.org/ngsi-ld/Property\"\r\n" + + " ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [\r\n" + + " {\r\n" + + " \"@value\": \"120\"\r\n" + + " }\r\n" + + " ]\r\n" + + " },\r\n" + + " {\r\n" + + " 
\"https://uri.etsi.org/ngsi-ld/instanceId\": [\r\n" + + " {\r\n" + + " \"@value\": \"d3ac28df-977f-4151-a432-dc088f7400d7\"\r\n" + + " }\r\n" + + " ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/observedAt\": [\r\n" + + " {\r\n" + + " \"@type\": \"https://uri.etsi.org/ngsi-ld/DateTime\",\r\n" + + " \"@value\": \"2018-08-01T12:05:00Z\"\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@type\": [\r\n" + + " \"https://uri.etsi.org/ngsi-ld/Property\"\r\n" + + " ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [\r\n" + + " {\r\n" + + " \"@value\": \"80\"\r\n" + + " }\r\n" + + " ]\r\n" + + " }\r\n" + + " ],\r\n" + + " \"@type\": [\r\n" + + " \"http://example.org/vehicle/Vehicle\"\r\n" + + " ]\r\n" + + " }\r\n"; + + entity=DataSerializer.getEntity(payload); + } + + @After + public void tearDown() { + typeValidation=null; + } + + @Test + public void typeValidationTest() { + try { + Assert.assertTrue(typeValidation.validateEntity(entity, null)); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void typeValidationFailureTest() throws ResponseException{ + thrown.expect(ResponseException.class); + thrown.expectMessage("@type is not recognized"); + + entity.setType("abc"); + typeValidation.validateEntity(entity, null); + } + + @Test + public void idValidationTest(){ + try { + Assert.assertTrue(idValidation.validateEntity(entity, null)); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void idValidationFailureTest() throws ResponseException, URISyntaxException{ + thrown.expect(ResponseException.class); + thrown.expectMessage("id is not a URI"); + + entity.setId(new URI("abc")); + idValidation.validateEntity(entity, null); + } + + @Test + public void propertyValidationTest() { + try { + Assert.assertTrue(propertyValidation.validateEntity(entity, null)); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void propertyValidationFailureTest() throws ResponseException { + thrown.expect(ResponseException.class); + thrown.expectMessage("Entity with a property value equal to null"); + + entity.getProperties().get(0).setEntries(null); + propertyValidation.validateEntity(entity, null); + + } + + @Test + public void relationshipValidationTest(){ + try { + Assert.assertTrue(relationshipValidation.validateEntity(entity, null)); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void relationshipValidationFailureTest() throws ResponseException { + thrown.expect(ResponseException.class); + thrown.expectMessage("Entity with a Relationship object equal to null"); + + Relationship rel=new Relationship(); + rel.setObjects(null); + ArrayList relsList=new ArrayList<>(); + relsList.add(rel); + entity.setRelationships(relsList); + + entity.getRelationships().get(0).setObjects(null); + relationshipValidation.validateEntity(entity, null); + + } +} diff --git a/scorpio-broker/Core/InfoServer/.gitignore b/scorpio-broker/Core/InfoServer/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..909c51ad8a826a2e4b90278784d4820bdb341b39 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/.gitignore @@ -0,0 +1,90 @@ +/target/ +/logs/ + +.metadata +bin/ +tmp/ +*.tmp +*.bak +*.swp +*~.nib +local.properties +.settings/ +.loadpath +.recommenders + +# External tool builders +.externalToolBuilders/ + +# Locally stored "Eclipse launch configurations" +*.launch + +# PyDev specific (Python IDE for Eclipse) +*.pydevproject + +# CDT-specific (C/C++ Development Tooling) +.cproject + +# CDT- autotools +.autotools + +# Java annotation processor (APT) 
+.factorypath + +# PDT-specific (PHP Development Tools) +.buildpath + +# sbteclipse plugin +.target + +# Tern plugin +.tern-project + +# TeXlipse plugin +.texlipse + +# STS (Spring Tool Suite) +.springBeans + +# Code Recommenders +.recommenders/ + +# Annotation Processing +.apt_generated/ + +# Scala IDE specific (Scala & Java development for Eclipse) +.cache-main +.scala_dependencies +.worksheet + +### Eclipse Patch ### +# Eclipse Core +.project + +# JDT-specific (Eclipse Java Development Tools) +.classpath + +# Annotation Processing +.apt_generated + +### Java ### +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar \ No newline at end of file diff --git a/scorpio-broker/Core/InfoServer/dockerfile4maven b/scorpio-broker/Core/InfoServer/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..995b640a12f1afacbe6911af2b383768d2e333f1 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/dockerfile4maven @@ -0,0 +1,14 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} + diff --git a/scorpio-broker/Core/InfoServer/pom.xml b/scorpio-broker/Core/InfoServer/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..74b9c22f729f98b2d55c3011736d67b333895b75 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/pom.xml @@ -0,0 +1,59 @@ + + 4.0.0 + InfoServer + jar + 1.0.0-SNAPSHOT + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-logging + + + + + + diff --git a/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/InfoServer.java b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/InfoServer.java new file mode 100644 index 0000000000000000000000000000000000000000..5df088e5488484925ba34f366791d6fd78dcfb46 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/InfoServer.java @@ -0,0 +1,32 @@ +package eu.neclab.ngsildbroker.infoserver; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; + +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; + + +@SpringBootApplication +@Import({KafkaConfig.class, SwaggerConfigDetails.class}) +public class InfoServer {// implements QueryHandlerInterface{ + + public static void main(String[] args) { + SpringApplication.run(InfoServer.class, args); + } + + + @Bean + SecurityConfig securityConfig() { + return 
new SecurityConfig(); + } + + @Bean + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } +} diff --git a/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/config/InfoServerResourceConfigurer.java b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/config/InfoServerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..7e92ea6727dc56e9469c848031ffed4f0439404d --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/config/InfoServerResourceConfigurer.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.infoserver.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. + */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class InfoServerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/CoreContextController.java b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/CoreContextController.java new file mode 100644 index 0000000000000000000000000000000000000000..f7599be99846910986d3b2830927a302fc0d6aea --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/CoreContextController.java @@ -0,0 +1,179 @@ +package eu.neclab.ngsildbroker.infoserver.controller; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequestMapping("/corecontext") +public class CoreContextController { + + + private final String CORE_CONTEXT = "{\r\n" + + " \"@context\": {\r\n" + + " \"ngsi-ld\": \"https://uri.etsi.org/ngsi-ld/\", \r\n" + + " \"id\": \"@id\",\r\n" + + " \"type\": \"@type\",\r\n" + + " \"value\": \"https://uri.etsi.org/ngsi-ld/hasValue\",\r\n" + + " \"object\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/hasObject\",\r\n" + + " \"@type\":\"@id\"\r\n" + + " },\r\n" + + " \"Property\": \"https://uri.etsi.org/ngsi-ld/Property\",\r\n" + + " \"Relationship\": \"https://uri.etsi.org/ngsi-ld/Relationship\",\r\n" + + " \"DateTime\": \"https://uri.etsi.org/ngsi-ld/DateTime\",\r\n" + + " \"Date\": \"https://uri.etsi.org/ngsi-ld/Date\",\r\n" + + " 
\"Time\": \"https://uri.etsi.org/ngsi-ld/Time\",\r\n" + + " \"createdAt\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/createdAt\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"modifiedAt\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/modifiedAt\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"observedAt\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/observedAt\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"datasetId\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/datasetId\",\r\n" + + " \"@type\": \"@id\"\r\n" + + " },\r\n" + + " \"instanceId\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/instanceId\",\r\n" + + " \"@type\": \"@id\"\r\n" + + " },\r\n" + + " \"unitCode\": \"https://uri.etsi.org/ngsi-ld/unitCode\",\r\n" + + " \"location\": \"https://uri.etsi.org/ngsi-ld/location\",\r\n" + + " \"observationSpace\": \"https://uri.etsi.org/ngsi-ld/observationSpace\",\r\n" + + " \"operationSpace\": \"https://uri.etsi.org/ngsi-ld/operationSpace\",\r\n" + + " \"GeoProperty\": \"https://uri.etsi.org/ngsi-ld/GeoProperty\",\r\n" + + " \"TemporalProperty\": \"https://uri.etsi.org/ngsi-ld/TemporalProperty\",\r\n" + + " \"ContextSourceRegistration\": \"https://uri.etsi.org/ngsi-ld/ContextSourceRegistration\",\r\n" + + " \"Subscription\": \"https://uri.etsi.org/ngsi-ld/Subscription\", \r\n" + + " \"Notification\": \"https://uri.etsi.org/ngsi-ld/Notification\",\r\n" + + " \"ContextSourceNotification\": \"https://uri.etsi.org/ngsi-ld/ContextSourceNotification\",\r\n" + + " \"title\": \"https://uri.etsi.org/ngsi-ld/title\",\r\n" + + " \"detail\": \"https://uri.etsi.org/ngsi-ld/detail\",\r\n" + + " \"idPattern\": \"https://uri.etsi.org/ngsi-ld/idPattern\",\r\n" + + " \"name\": \"https://uri.etsi.org/ngsi-ld/name\",\r\n" + + " \"description\": \"https://uri.etsi.org/ngsi-ld/description\",\r\n" + + " \"information\": \"https://uri.etsi.org/ngsi-ld/information\",\r\n" + + " \"observationInterval\": \"https://uri.etsi.org/ngsi-ld/observationInterval\",\r\n" + + " \"managementInterval\": \"https://uri.etsi.org/ngsi-ld/managementInterval\",\r\n" + + " \"expires\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/expires\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"endpoint\": \"https://uri.etsi.org/ngsi-ld/endpoint\",\r\n" + + " \"entities\": \"https://uri.etsi.org/ngsi-ld/entities\",\r\n" + + " \"properties\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/properties\",\r\n" + + " \"@type\": \"@vocab\"\r\n" + + " },\r\n" + + " \"relationships\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/relationships\",\r\n" + + " \"@type\": \"@vocab\"\r\n" + + " },\r\n" + + " \"start\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/start\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"end\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/end\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"watchedAttributes\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/watchedAttributes\",\r\n" + + " \"@type\": \"@vocab\"\r\n" + + " },\r\n" + + " \"timeInterval\": \"https://uri.etsi.org/ngsi-ld/timeInterval\",\r\n" + + " \"q\": \"https://uri.etsi.org/ngsi-ld/q\",\r\n" + + " \"geoQ\": \"https://uri.etsi.org/ngsi-ld/geoQ\",\r\n" + + " \"csf\": \"https://uri.etsi.org/ngsi-ld/csf\",\r\n" + + " \"isActive\": \"https://uri.etsi.org/ngsi-ld/isActive\",\r\n" + + " \"notification\": \"https://uri.etsi.org/ngsi-ld/notification\",\r\n" + + " \"status\": 
\"https://uri.etsi.org/ngsi-ld/status\",\r\n" + + " \"throttling\": \"https://uri.etsi.org/ngsi-ld/throttling\",\r\n" + + " \"temporalQ\": \"https://uri.etsi.org/ngsi-ld/temporalQ\",\r\n" + + " \"geometry\": \"https://uri.etsi.org/ngsi-ld/geometry\",\r\n" + + " \"coordinates\": \"https://uri.etsi.org/ngsi-ld/coordinates\",\r\n" + + " \"georel\": \"https://uri.etsi.org/ngsi-ld/georel\",\r\n" + + " \"geoproperty\": \"https://uri.etsi.org/ngsi-ld/geoproperty\",\r\n" + + " \"attributes\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/attributes\",\r\n" + + " \"@type\": \"@vocab\"\r\n" + + " },\r\n" + + " \"format\": \"https://uri.etsi.org/ngsi-ld/format\",\r\n" + + " \"timesSent\": \"https://uri.etsi.org/ngsi-ld/timesSent\",\r\n" + + " \"lastNotification\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/lastNotification\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"lastFailure\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/lastFailure\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"lastSuccess\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/lastSuccess\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"uri\": \"https://uri.etsi.org/ngsi-ld/uri\",\r\n" + + " \"accept\": \"https://uri.etsi.org/ngsi-ld/accept\",\r\n" + + " \"success\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/success\",\r\n" + + " \"@type\": \"@id\"\r\n" + + " },\r\n" + + " \"errors\": \"https://uri.etsi.org/ngsi-ld/errors\",\r\n" + + " \"error\": \"https://uri.etsi.org/ngsi-ld/error\",\r\n" + + " \"entityId\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/entityId\",\r\n" + + " \"@type\": \"@id\"\r\n" + + " },\r\n" + + " \"updated\": \"https://uri.etsi.org/ngsi-ld/updated\",\r\n" + + " \"unchanged\": \"https://uri.etsi.org/ngsi-ld/unchanged\",\r\n" + + " \"attributeName\": \"https://uri.etsi.org/ngsi-ld/attributeName\",\r\n" + + " \"reason\": \"https://uri.etsi.org/ngsi-ld/reason\",\r\n" + + " \"timerel\": \"https://uri.etsi.org/ngsi-ld/timerel\",\r\n" + + " \"time\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/time\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"endTime\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/endTime\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"timeproperty\": \"https://uri.etsi.org/ngsi-ld/timeproperty\",\r\n" + + " \"subscriptionId\": {\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/subscriptionId\",\r\n" + + " \"@type\": \"@id\"\r\n" + + " },\r\n" + + " \"notifiedAt\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/notifiedAt\",\r\n" + + " \"@type\": \"DateTime\"\r\n" + + " },\r\n" + + " \"data\": \"https://uri.etsi.org/ngsi-ld/data\",\r\n" + + " \"triggerReason\": \"https://uri.etsi.org/ngsi-ld/triggerReason\",\r\n" + + " \"values\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/hasValues\",\r\n" + + " \"@container\": \"@list\"\r\n" + + " },\r\n" + + " \"objects\":{\r\n" + + " \"@id\": \"https://uri.etsi.org/ngsi-ld/hasObjects\",\r\n" + + " \"@type\": \"@id\",\r\n" + + " \"@container\": \"@list\"\r\n" + + " },\r\n" + + " \"@vocab\": \"https://uri.etsi.org/ngsi-ld/default-context/\"\r\n" + + " }\r\n" + + "}"; + @GetMapping + public ResponseEntity getHealth(HttpServletRequest request) { + return ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json") + .body(CORE_CONTEXT); + } +} diff --git a/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/InfoServerController.java 
b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/InfoServerController.java new file mode 100644 index 0000000000000000000000000000000000000000..0f92dad47d5ee3f7a7c8c350e45a1ed54303e6e5 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/InfoServerController.java @@ -0,0 +1,121 @@ +package eu.neclab.ngsildbroker.infoserver.controller; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; + +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.exceptions.HttpErrorResponseException; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +@RestController +@RequestMapping("/health") +public class InfoServerController { + private static final int QUERY_MANAGER = 0; + private static final int ENTITY_MANAGER = 1; + private static final int STORAGE_MANAGER = 2; + private static final int SUBSCRIPTION_MANAGER = 3; + private static final int REGISTRY_MANAGER = 4; + private static final int HISTORY_MANAGER = 5; + HashMap headers = new HashMap(); + HashMap microService2Uri = new HashMap(); + HashMap microService2SuccessReply = new HashMap(); + HashMap microService2HttpMethod = new HashMap(); + String dummyMessage = "{\r\n" + + " \"id\": \"NOTANURI\",\r\n" + + " \"type\": \"https://uri.fiware.org/ns/data-models#AirQualityObserved\"\r\n" + + " \r\n" + + "}"; + + { + try { + headers.put("Accept", AppConstants.NGB_APPLICATION_JSON); + microService2Uri.put(QUERY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/entities/")); + microService2Uri.put(ENTITY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/entities/")); + microService2Uri.put(STORAGE_MANAGER, new URI("http://localhost:9090/scorpio/v1/info/")); + microService2Uri.put(SUBSCRIPTION_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/subscriptions/")); + microService2Uri.put(REGISTRY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/csourceRegistrations/")); + microService2Uri.put(HISTORY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/temporal/entities/")); + + microService2SuccessReply.put(QUERY_MANAGER, 400); + microService2SuccessReply.put(ENTITY_MANAGER, 400); + microService2SuccessReply.put(STORAGE_MANAGER, 200); + microService2SuccessReply.put(SUBSCRIPTION_MANAGER, 200); + microService2SuccessReply.put(REGISTRY_MANAGER, 400); + microService2SuccessReply.put(HISTORY_MANAGER, 400); + + microService2HttpMethod.put(QUERY_MANAGER, 0); + microService2HttpMethod.put(ENTITY_MANAGER, 1); + microService2HttpMethod.put(STORAGE_MANAGER, 0); + microService2HttpMethod.put(SUBSCRIPTION_MANAGER, 0); + microService2HttpMethod.put(REGISTRY_MANAGER, 0); + microService2HttpMethod.put(HISTORY_MANAGER, 0); + } catch (URISyntaxException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + + HttpUtils httpUtils = HttpUtils.getInstance(null); + + @PostConstruct + private void setup() { + } + + @GetMapping + public ResponseEntity getHealth(HttpServletRequest request) { + HashMap result = new 
HashMap(); + + result.put("Status of Querymanager", getStatus(QUERY_MANAGER)); + result.put("Status of Entitymanager", getStatus(ENTITY_MANAGER)); + result.put("Status of Storagemanager", getStatus(STORAGE_MANAGER)); + result.put("Status of Subscriptionmanager", getStatus(SUBSCRIPTION_MANAGER)); + result.put("Status of Registrymanager", getStatus(REGISTRY_MANAGER)); + result.put("Status of Historymanager", getStatus(HISTORY_MANAGER)); + return ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json") + .body(DataSerializer.toJson(result)); + } + + private String getStatus(int component) { + URI uri = microService2Uri.get(component); + Integer success = microService2SuccessReply.get(component); + try { + switch (microService2HttpMethod.get(component)) { + case 0: + httpUtils.doGet(uri, headers); + return "Up and running"; + case 1: + httpUtils.doPost(uri, dummyMessage, null); + return "Up and running"; + default: + return "Unable to determine status"; + } + + } catch (IOException e) { + if(e instanceof HttpErrorResponseException) { + HttpErrorResponseException httpError = (HttpErrorResponseException) e; + if(httpError.getStatusCode() == success) { + return "Up and running"; + } + } + } + return "Not running"; + + + } + + +} diff --git a/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/VersionController.java b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/VersionController.java new file mode 100644 index 0000000000000000000000000000000000000000..b89f8a7b032c2707cf2a7c132b3ef1ac68249a42 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/java/eu/neclab/ngsildbroker/infoserver/controller/VersionController.java @@ -0,0 +1,21 @@ +package eu.neclab.ngsildbroker.infoserver.controller; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@RequestMapping("/version") +public class VersionController { + + private final String RESULT = "{\r\n" + " \"scorpio version\": \"1.0.0\"\r\n" + "}"; + + @GetMapping + public ResponseEntity getHealth(HttpServletRequest request) { + return ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json").body(RESULT); + } +} diff --git a/scorpio-broker/Core/InfoServer/src/main/resources/application-aaio.yml b/scorpio-broker/Core/InfoServer/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..1004024b4ff01b4211925f0a1beaf660aa836cd9 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/resources/application-aaio.yml @@ -0,0 +1,17 @@ +server: + port: 27015 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + +bootstrap: + servers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/InfoServer/src/main/resources/application-aio.yml b/scorpio-broker/Core/InfoServer/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..d845a75d2eecaa65623df2f76b5c95a0c1f73764 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/resources/application-aio.yml @@ -0,0 +1,17 @@ +server: + port: 27015 + +spring: + cloud: 
+ stream: + kafka: + binder: + brokers: localhost:9092 + +bootstrap: + servers: localhost:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/InfoServer/src/main/resources/application-dist.yml b/scorpio-broker/Core/InfoServer/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..b54d188c4bd4b7f410b5b350468f05cbb25ef659 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/resources/application-dist.yml @@ -0,0 +1,17 @@ +server: + port: 27015 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + +bootstrap: + servers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/Core/InfoServer/src/main/resources/application.yml b/scorpio-broker/Core/InfoServer/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..e8a9ed43dc2b40304eb4552290c647397a7d10b3 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/resources/application.yml @@ -0,0 +1,55 @@ +spring: + application: + name: info-server + main: + lazy-initialization: true + allow-bean-definition-overriding: true + kafka: + admin: + properties: + cleanup: + policy: compact + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + +server: + port: 27015 + tomcat: + max: + threads: 20 +#Entity-Manager properties +entity: + topic: ENTITY +query: + topic: QUERY + result: + topic: QUERY_RESULT +csource: + query: + topic: CONTEXT_REGISTRY_QUERY + +bootstrap: + servers: localhost:9092 +#enable log compaction + + +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/InfoServer/src/main/resources/log4j2-spring.xml b/scorpio-broker/Core/InfoServer/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..5c5ba04ba96e8a180228217253fdfb248a16302f --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/InfoServer/src/main/resources/ngsi-ld-core-context.jsonld b/scorpio-broker/Core/InfoServer/src/main/resources/ngsi-ld-core-context.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..fb43242d5a69cf2b5c0e7f22a45e85c1750f1368 --- /dev/null +++ b/scorpio-broker/Core/InfoServer/src/main/resources/ngsi-ld-core-context.jsonld @@ -0,0 +1,158 @@ +{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "id": "@id", + "type": "@type", + "value": "https://uri.etsi.org/ngsi-ld/hasValue", + "object": { + "@id": "https://uri.etsi.org/ngsi-ld/hasObject", + "@type":"@id" + }, + "Property": "https://uri.etsi.org/ngsi-ld/Property", + "Relationship": "https://uri.etsi.org/ngsi-ld/Relationship", + "DateTime": "https://uri.etsi.org/ngsi-ld/DateTime", + "Date": "https://uri.etsi.org/ngsi-ld/Date", + "Time": "https://uri.etsi.org/ngsi-ld/Time", + "createdAt": { + "@id": "https://uri.etsi.org/ngsi-ld/createdAt", + "@type": "DateTime" + }, + "modifiedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/modifiedAt", + "@type": "DateTime" + }, + "observedAt": { + "@id": 
"https://uri.etsi.org/ngsi-ld/observedAt", + "@type": "DateTime" + }, + "datasetId": { + "@id": "https://uri.etsi.org/ngsi-ld/datasetId", + "@type": "@id" + }, + "instanceId": { + "@id": "https://uri.etsi.org/ngsi-ld/instanceId", + "@type": "@id" + }, + "unitCode": "https://uri.etsi.org/ngsi-ld/unitCode", + "location": "https://uri.etsi.org/ngsi-ld/location", + "observationSpace": "https://uri.etsi.org/ngsi-ld/observationSpace", + "operationSpace": "https://uri.etsi.org/ngsi-ld/operationSpace", + "GeoProperty": "https://uri.etsi.org/ngsi-ld/GeoProperty", + "TemporalProperty": "https://uri.etsi.org/ngsi-ld/TemporalProperty", + "ContextSourceRegistration": "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration", + "Subscription": "https://uri.etsi.org/ngsi-ld/Subscription", + "Notification": "https://uri.etsi.org/ngsi-ld/Notification", + "ContextSourceNotification": "https://uri.etsi.org/ngsi-ld/ContextSourceNotification", + "title": "https://uri.etsi.org/ngsi-ld/title", + "detail": "https://uri.etsi.org/ngsi-ld/detail", + "idPattern": "https://uri.etsi.org/ngsi-ld/idPattern", + "name": "https://uri.etsi.org/ngsi-ld/name", + "description": "https://uri.etsi.org/ngsi-ld/description", + "information": "https://uri.etsi.org/ngsi-ld/information", + "observationInterval": "https://uri.etsi.org/ngsi-ld/observationInterval", + "managementInterval": "https://uri.etsi.org/ngsi-ld/managementInterval", + "expires": { + "@id": "https://uri.etsi.org/ngsi-ld/expires", + "@type": "DateTime" + }, + "endpoint": "https://uri.etsi.org/ngsi-ld/endpoint", + "entities": "https://uri.etsi.org/ngsi-ld/entities", + "properties": { + "@id": "https://uri.etsi.org/ngsi-ld/properties", + "@type": "@vocab" + }, + "relationships": { + "@id": "https://uri.etsi.org/ngsi-ld/relationships", + "@type": "@vocab" + }, + "start": { + "@id": "https://uri.etsi.org/ngsi-ld/start", + "@type": "DateTime" + }, + "end": { + "@id": "https://uri.etsi.org/ngsi-ld/end", + "@type": "DateTime" + }, + "watchedAttributes":{ + "@id": "https://uri.etsi.org/ngsi-ld/watchedAttributes", + "@type": "@vocab" + }, + "timeInterval": "https://uri.etsi.org/ngsi-ld/timeInterval", + "q": "https://uri.etsi.org/ngsi-ld/q", + "geoQ": "https://uri.etsi.org/ngsi-ld/geoQ", + "csf": "https://uri.etsi.org/ngsi-ld/csf", + "isActive": "https://uri.etsi.org/ngsi-ld/isActive", + "notification": "https://uri.etsi.org/ngsi-ld/notification", + "status": "https://uri.etsi.org/ngsi-ld/status", + "throttling": "https://uri.etsi.org/ngsi-ld/throttling", + "temporalQ": "https://uri.etsi.org/ngsi-ld/temporalQ", + "geometry": "https://uri.etsi.org/ngsi-ld/geometry", + "coordinates": "https://uri.etsi.org/ngsi-ld/coordinates", + "georel": "https://uri.etsi.org/ngsi-ld/georel", + "geoproperty": "https://uri.etsi.org/ngsi-ld/geoproperty", + "attributes": { + "@id": "https://uri.etsi.org/ngsi-ld/attributes", + "@type": "@vocab" + }, + "format": "https://uri.etsi.org/ngsi-ld/format", + "timesSent": "https://uri.etsi.org/ngsi-ld/timesSent", + "lastNotification":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastNotification", + "@type": "DateTime" + }, + "lastFailure":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastFailure", + "@type": "DateTime" + }, + "lastSuccess":{ + "@id": "https://uri.etsi.org/ngsi-ld/lastSuccess", + "@type": "DateTime" + }, + "uri": "https://uri.etsi.org/ngsi-ld/uri", + "accept": "https://uri.etsi.org/ngsi-ld/accept", + "success": { + "@id": "https://uri.etsi.org/ngsi-ld/success", + "@type": "@id" + }, + "errors": "https://uri.etsi.org/ngsi-ld/errors", + "error": 
"https://uri.etsi.org/ngsi-ld/error", + "entityId": { + "@id": "https://uri.etsi.org/ngsi-ld/entityId", + "@type": "@id" + }, + "updated": "https://uri.etsi.org/ngsi-ld/updated", + "unchanged": "https://uri.etsi.org/ngsi-ld/unchanged", + "attributeName": "https://uri.etsi.org/ngsi-ld/attributeName", + "reason": "https://uri.etsi.org/ngsi-ld/reason", + "timerel": "https://uri.etsi.org/ngsi-ld/timerel", + "time": { + "@id": "https://uri.etsi.org/ngsi-ld/time", + "@type": "DateTime" + }, + "endTime": { + "@id": "https://uri.etsi.org/ngsi-ld/endTime", + "@type": "DateTime" + }, + "timeproperty": "https://uri.etsi.org/ngsi-ld/timeproperty", + "subscriptionId": { + "@id": "https://uri.etsi.org/ngsi-ld/subscriptionId", + "@type": "@id" + }, + "notifiedAt":{ + "@id": "https://uri.etsi.org/ngsi-ld/notifiedAt", + "@type": "DateTime" + }, + "data": "https://uri.etsi.org/ngsi-ld/data", + "triggerReason": "https://uri.etsi.org/ngsi-ld/triggerReason", + "values":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasValues", + "@container": "@list" + }, + "objects":{ + "@id": "https://uri.etsi.org/ngsi-ld/hasObjects", + "@type": "@id", + "@container": "@list" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} diff --git a/scorpio-broker/Core/QueryManager/.gitignore b/scorpio-broker/Core/QueryManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/Core/QueryManager/dockerfile4maven b/scorpio-broker/Core/QueryManager/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/Core/QueryManager/pom.xml b/scorpio-broker/Core/QueryManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..45846e0d9213ba99d91beb4ae9979af529caad8d --- /dev/null +++ b/scorpio-broker/Core/QueryManager/pom.xml @@ -0,0 +1,95 @@ + + 4.0.0 + QueryManager + jar + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + 1.0.0-SNAPSHOT + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + + + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-logging + + + + + + org.powermock + powermock-module-junit4 + 2.0.2 + test + + + + org.powermock + powermock-api-mockito2 + 2.0.2 + test + + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.postgresql + postgresql + + + + diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/QueryHandler.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/QueryHandler.java new file mode 100644 index 
0000000000000000000000000000000000000000..08b45c7a8d411aba44b8493d874fd5bb67f5a056 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/QueryHandler.java @@ -0,0 +1,89 @@ +package eu.neclab.ngsildbroker.queryhandler; + +import java.util.HashMap; +import java.util.Map; + +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.producer.ProducerConfig; +import org.apache.kafka.common.serialization.ByteArrayDeserializer; +import org.apache.kafka.common.serialization.ByteArraySerializer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; +import org.springframework.cloud.stream.annotation.EnableBinding; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; +import org.springframework.kafka.core.ConsumerFactory; +import org.springframework.kafka.core.DefaultKafkaConsumerFactory; +import org.springframework.kafka.core.DefaultKafkaProducerFactory; +import org.springframework.kafka.core.ProducerFactory; +import org.springframework.web.client.RestTemplate; + +import eu.neclab.ngsildbroker.commons.ldcontext.AtContextProducerChannel; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.CommonKafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; +import eu.neclab.ngsildbroker.queryhandler.config.QueryProducerChannel; + + + +@SpringBootApplication +@Import({KafkaConfig.class, SwaggerConfigDetails.class}) +@EnableBinding({ AtContextProducerChannel.class, QueryProducerChannel.class}) + +public class QueryHandler {// implements QueryHandlerInterface{ + + + + @Value("${atcontext.url}") + String atContextServerUrl; + + public static void main(String[] args) { + SpringApplication.run(QueryHandler.class, args); + } + + @Bean("qmops") + KafkaOps ops() { + return new KafkaOps(); + } + @Bean("qmconRes") + ContextResolverBasic conRes() { + return new ContextResolverBasic(atContextServerUrl); + } + + + @Bean("qmrestTemp") + RestTemplate restTemp() { + return new RestTemplate(); + } + + @Bean("qmsecurityConfig") + SecurityConfig securityConfig() { + return new SecurityConfig(); + } + + @Bean("qmresourceConfigDetails") + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } + + @Bean("qmqueryParser") + QueryParser queryParser() { + return new QueryParser(); + } + @Bean("qmparamsResolver") + ParamsResolver paramsResolver() { + return new ParamsResolver(); + } + +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/config/QueryManagerResourceConfigurer.java 
b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/config/QueryManagerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..d75acfdd012b471db1f240d1f209f3c34ae7b291 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/config/QueryManagerResourceConfigurer.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.queryhandler.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. + */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class QueryManagerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/config/QueryProducerChannel.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/config/QueryProducerChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..9d440a2df8d191213ed5dbe943bb33ae30777914 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/config/QueryProducerChannel.java @@ -0,0 +1,24 @@ +package eu.neclab.ngsildbroker.queryhandler.config; + +import org.springframework.cloud.stream.annotation.Output; +import org.springframework.messaging.MessageChannel; + +import eu.neclab.ngsildbroker.commons.stream.interfaces.IProducerChannels; + +/** + * + * @version 1.0 + * @date 10-Jul-2018 + */ +public interface QueryProducerChannel extends IProducerChannels { + + public String paginationWriteChannel = "PAGINATION"; + + + + @Output(paginationWriteChannel) + MessageChannel paginationWriteChannel(); + + + +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.java new file mode 100644 index 0000000000000000000000000000000000000000..cdf43db76e850aad828e90a33e853373c0dc063e --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.java @@ -0,0 +1,324 @@ +package eu.neclab.ngsildbroker.queryhandler.controller; + +import java.net.URLDecoder; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import 
org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.QueryResult; +import eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.queryhandler.services.QueryService; +import eu.neclab.ngsildbroker.queryhandler.utils.Validator; + +@RestController +@RequestMapping("/ngsi-ld/v1/entities") +public class QueryController {// implements QueryHandlerInterface { + private final static Logger logger = LogManager.getLogger(QueryController.class); + private final static String MY_REQUEST_URL = "/ngsi-ld/v1/entities"; + private final static String MY_REQUEST_URL_ALT = "/ngsi-ld/v1/entities/"; + @Autowired + QueryService queryService; + + @Autowired + @Qualifier("qmparamsResolver") + ParamsResolver paramsResolver; + + @Autowired + @Qualifier("qmconRes") + ContextResolverBasic contextResolver; + + @Value("${atcontext.url}") + String atContextServerUrl; + + @Value("${defaultLimit}") + int defaultLimit = 50; + @Value("${maxLimit}") + int maxLimit = 1000; + + @Value("${ngb.debugmode}") + boolean debug = false; + + private HttpUtils httpUtils; + + private final byte[] emptyResult1 = { '{', ' ', '}' }; + private final byte[] emptyResult2 = { '{', '}' }; + @PostConstruct + private void setup() { + httpUtils = HttpUtils.getInstance(contextResolver); + } + + /** + * Method(GET) for multiple attributes separated by comma list + * + * @param request + * @param entityId + * @param attrs + * @return + */ + @GetMapping(path = "/{entityId}") + public ResponseEntity getEntity(HttpServletRequest request, @PathVariable("entityId") String entityId, + @RequestParam(value = "attrs", required = false) List attrs, + @RequestParam(value = "options", required = false) List options) { + String originalQuery = NGSIConstants.QUERY_PARAMETER_ID + "=" + entityId; + HashMap paramMap = new HashMap(); + paramMap.put(NGSIConstants.QUERY_PARAMETER_ID, new String[] { entityId }); + ResponseEntity result = getQueryData(request, originalQuery, paramMap, attrs, null, null, null, options, + false, true); + if (Arrays.equals(emptyResult1, result.getBody()) || Arrays.equals(emptyResult2, result.getBody())) { + return ResponseEntity.status(HttpStatus.NOT_FOUND) + .body(new RestResponse(ErrorType.NotFound, "Resource not found.").toJsonBytes()); + } + return result; + /* + * String result = null; try { + * 
logger.trace("getEntity() ::query operation by kafka ::"); + * + * if (!request.getParameterMap().isEmpty() && attrs == null && options == null) + * { throw new ResponseException(ErrorType.InvalidRequest); } + * + * boolean includeSysAttrs = (options != null && + * options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_SYSATTRS)); boolean + * keyValues = (options != null && + * options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_KEYVALUES)); + * ArrayList expandedAttrs = new ArrayList(); + * + * if (attrs != null) { + * + * List linkHeaders = HttpUtils.parseLinkHeader(request, + * NGSIConstants.HEADER_REL_LDCONTEXT); + * + * for (String attrib : attrs) { try { + * expandedAttrs.add(paramsResolver.expandAttribute(attrib, linkHeaders)); } + * catch (ResponseException exception) { continue; } } // TODO valid this. spec + * doesn't say what to do here!!! if (expandedAttrs.isEmpty()) { return + * ResponseEntity.status(HttpStatus.ACCEPTED).body("{}".getBytes()); } } + * + * result = queryService.retrieveEntity(entityId, expandedAttrs, keyValues, + * includeSysAttrs); if (result != "null" && !result.isEmpty()) { return + * httpUtils.generateReply(request, result); } else { return + * ResponseEntity.status(HttpStatus.NOT_FOUND) .body(new + * RestResponse(ErrorType.NotFound, "Resource not found.").toJsonBytes()); } + * + * } catch (ResponseException exception) { logger.error("Exception ::", + * exception); return ResponseEntity.status(exception.getHttpStatus()).body(new + * RestResponse(exception).toJsonBytes()); } catch (Exception exception) { + * logger.error("Exception ::", exception); return + * ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) .body(new + * RestResponse(ErrorType.InternalError, "Internal error").toJsonBytes()); } + */ } + + /** + * Method(GET) for fetching all entities by kafka and other geo query operation + * by database + * + * @param request + * @param type + * @return ResponseEntity object + */ + @GetMapping() + public ResponseEntity getAllEntity(HttpServletRequest request, + @RequestParam(value = "attrs", required = false) List attrs, + @RequestParam(value = "limit", required = false) Integer limit, + @RequestParam(value = "offset", required = false) Integer offset, + @RequestParam(value = "qtoken", required = false) String qToken, + @RequestParam(name = "options", required = false) List options, + @RequestParam(name = "services", required = false) Boolean showServices) { + return getQueryData(request, request.getQueryString(), request.getParameterMap(), attrs, limit, offset, qToken, + options, showServices, false); + } + + private ResponseEntity getQueryData(HttpServletRequest request, String originalQueryParams, + Map paramMap, List attrs, Integer limit, Integer offset, String qToken, + List options, Boolean showServices, boolean retrieve) { + + if (limit == null) { + limit = defaultLimit; + } + if (offset == null) { + offset = 0; + } + + try { + logger.trace("getAllEntity() ::"); + + List linkHeaders = HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT); + if (retrieve || request.getRequestURI().equals(MY_REQUEST_URL) + || request.getRequestURI().equals(MY_REQUEST_URL_ALT)) { + if (retrieve || originalQueryParams != null) { + Validator.validate(request.getParameterMap(), maxLimit, retrieve); + if (originalQueryParams != null) { + originalQueryParams = URLDecoder.decode(originalQueryParams, NGSIConstants.ENCODE_FORMAT); + } + QueryParams qp = paramsResolver.getQueryParamsFromUriQuery(paramMap, linkHeaders); + if (qp == null) // invalid query 
+ throw new ResponseException(ErrorType.InvalidRequest); + qp.setKeyValues( + (options != null && options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_KEYVALUES))); + qp.setIncludeSysAttrs( + (options != null && options.contains(NGSIConstants.QUERY_PARAMETER_OPTIONS_SYSATTRS))); + if (attrs != null) { + ArrayList expandedAttrs = new ArrayList(); + for (String attrib : attrs) { + try { + expandedAttrs.add(paramsResolver.expandAttribute(attrib, linkHeaders)); + } catch (ResponseException exception) { + continue; + } + } + qp.setAttrs(String.join(",", expandedAttrs)); + } + + checkParamsForValidity(qp); + QueryResult qResult = queryService.getData(qp, originalQueryParams, linkHeaders, limit, offset, + qToken, showServices); + + return generateReply(request, qResult, !retrieve); + + } else { + + if (debug) { + ArrayList allEntityResult = queryService.retriveAllEntity(); + if (allEntityResult.size() > 1) { + return httpUtils.generateReply(request, allEntityResult.get(0)); + } else { + return ResponseEntity.accepted() + .header(HttpHeaders.CONTENT_TYPE, AppConstants.NGB_APPLICATION_JSONLD) + .body(allEntityResult.get(0).getBytes()); + } + } else { + // as per [5.7.2.4] + throw new ResponseException(ErrorType.BadRequestData); + } + + } + } else { + throw new ResponseException(ErrorType.BadRequestData); + } + } catch ( + + ResponseException exception) { + logger.error("Exception ::", exception); + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception ::", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + private void checkParamsForValidity(QueryParams qp) throws ResponseException { + if (qp.getGeometry() != null && !qp.getGeometry().isEmpty()) { + if (!NGSIConstants.ALLOWED_GEOMETRIES.contains(qp.getGeometry())) { + throw new ResponseException(ErrorType.BadRequestData, "Invalid geometry provided"); + } + } + if (qp.getGeorel() != null && qp.getGeorel().getGeorelOp() != null && !qp.getGeorel().getGeorelOp().isEmpty()) { + if (!NGSIConstants.ALLOWED_GEOREL.contains(qp.getGeorel().getGeorelOp())) { + throw new ResponseException(ErrorType.BadRequestData, "Invalid georel provided"); + } + } + + } + + public ResponseEntity generateReply(HttpServletRequest request, QueryResult qResult, boolean forceArray) + throws ResponseException { + String nextLink = generateNextLink(request, qResult); + String prevLink = generatePrevLink(request, qResult); + ArrayList additionalLinks = new ArrayList(); + if (nextLink != null) { + additionalLinks.add(nextLink); + } + if (prevLink != null) { + additionalLinks.add(prevLink); + } + + HashMap> additionalHeaders = new HashMap>(); + if (!additionalLinks.isEmpty()) { + additionalHeaders.put(HttpHeaders.LINK, additionalLinks); + } + + return httpUtils.generateReply(request, "[" + String.join(",", qResult.getDataString()) + "]", + additionalHeaders, null, forceArray); + } + + private String generateNextLink(HttpServletRequest request, QueryResult qResult) { + if (qResult.getResultsLeftAfter() == null || qResult.getResultsLeftAfter() <= 0) { + return null; + } + return generateFollowUpLinkHeader(request, qResult.getOffset() + qResult.getLimit(), qResult.getLimit(), + qResult.getqToken(), "next"); + } + + private String generateFollowUpLinkHeader(HttpServletRequest request, int offset, int limit, String token, + String rel) { + + 
StringBuilder builder = new StringBuilder(" entry : request.getParameterMap().entrySet()) { + String[] values = entry.getValue(); + String key = entry.getKey(); + if (key.equals("offset")) { + continue; + } + if (key.equals("qtoken")) { + continue; + } + if (key.equals("limit")) { + continue; + } + + for (String value : values) { + builder.append(key + "=" + value + "&"); + } + + } + builder.append("offset=" + offset + "&"); + builder.append("limit=" + limit + "&"); + builder.append("qtoken=" + token + ">;rel=\"" + rel + "\""); + return builder.toString(); + } + + private String generatePrevLink(HttpServletRequest request, QueryResult qResult) { + if (qResult.getResultsLeftBefore() == null || qResult.getResultsLeftBefore() <= 0) { + return null; + } + int offset = qResult.getOffset() - qResult.getLimit(); + if (offset < 0) { + offset = 0; + } + int limit = qResult.getLimit(); + + return generateFollowUpLinkHeader(request, offset, limit, qResult.getqToken(), "prev"); + } + +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/CSourceDAO.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/CSourceDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..4e43f97367774c7e55dd55fc78941b6ed0a8d7ae --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/CSourceDAO.java @@ -0,0 +1,329 @@ +package eu.neclab.ngsildbroker.queryhandler.repository; + +import java.sql.SQLException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.stereotype.Repository; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository("qmcsourcedao") +public class CSourceDAO extends StorageReaderDAO { + + private final static Logger logger = LogManager.getLogger(CSourceDAO.class); + + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_ID = "entity_id"; + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN = "entity_idpattern"; + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE = "entity_type"; + protected final static String DBCOLUMN_CSOURCE_INFO_PROPERTY_ID = "property_id"; + protected final static String DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID = "relationship_id"; + + protected final static Map NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO = initNgsildToSqlReservedPropertiesMappingGeo(); + + protected static Map initNgsildToSqlReservedPropertiesMappingGeo() { + Map map = new HashMap<>(); + map.put(NGSIConstants.NGSI_LD_LOCATION, DBConstants.DBCOLUMN_LOCATION); + return Collections.unmodifiableMap(map); + } + + protected final static Map NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING = initNgsildToPostgisGeoOperatorsMapping(); + + protected static Map initNgsildToPostgisGeoOperatorsMapping() { + Map map = new HashMap<>(); + map.put(NGSIConstants.GEO_REL_NEAR, null); + map.put(NGSIConstants.GEO_REL_WITHIN, 
DBConstants.POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_CONTAINS, DBConstants.POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_OVERLAPS, null); + map.put(NGSIConstants.GEO_REL_INTERSECTS, DBConstants.POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_EQUALS, DBConstants.POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_DISJOINT, null); + return Collections.unmodifiableMap(map); + } + + private boolean externalCsourcesOnly = false; + + @Override + public List query(QueryParams qp) { + this.externalCsourcesOnly = false; + return super.query(qp); + } + + public List queryExternalCsources(QueryParams qp) throws SQLException { + this.externalCsourcesOnly = true; + return super.query(qp); + } + + @Override + protected String translateNgsildQueryToSql(QueryParams qp) throws ResponseException { + StringBuilder fullSqlWhere = new StringBuilder(70); + String sqlWhere = ""; + boolean csourceInformationIsNeeded = false; + boolean sqlOk = false; + + if (externalCsourcesOnly) { + fullSqlWhere.append("(c.internal = false) AND "); + } + + // query by type + (id, idPattern) + if (qp.getType()!=null) { + + String typeValue = qp.getType(); + String idValue = ""; + String idPatternValue = ""; + if (qp.getId()!=null) + idValue = qp.getId(); + if (qp.getIdPattern()!=null) + idPatternValue = qp.getIdPattern(); + // id takes precedence on idPattern. clear idPattern if both are given + if (!idValue.isEmpty() && !idPatternValue.isEmpty()) + idPatternValue = ""; + + // query by type + (id, idPattern) + attrs + if (qp.getAttrs()!=null) { + String attrsValue = qp.getAttrs(); + sqlWhere = getCommonSqlWhereForTypeIdIdPattern(typeValue, idValue, idPatternValue); + sqlWhere += " AND "; + sqlWhere += getSqlWhereByAttrsInTypeFiltering(attrsValue); + + } else { // query by type + (id, idPattern) only (no attrs) + + sqlWhere = "(c.has_registrationinfo_with_attrs_only) OR "; + sqlWhere += getCommonSqlWhereForTypeIdIdPattern(typeValue, idValue, idPatternValue); + + } + fullSqlWhere.append("(" + sqlWhere + ") AND "); + csourceInformationIsNeeded = true; + sqlOk = true; + + // query by attrs only + } else if (qp.getAttrs()!=null) { + String attrsValue = qp.getAttrs(); + if (attrsValue.indexOf(",") == -1) { + sqlWhere = "ci." + DBCOLUMN_CSOURCE_INFO_PROPERTY_ID+" = '"+attrsValue+"' OR " + +"ci." + DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID+" = '"+attrsValue+"'"; + }else { + sqlWhere="ci." + DBCOLUMN_CSOURCE_INFO_PROPERTY_ID+" IN ('"+attrsValue.replace(",", "','")+"') OR " + +"ci." 
+ DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID+" IN ('"+attrsValue.replace(",", "','")+"')"; + } + fullSqlWhere.append("(" + sqlWhere + ") AND "); + csourceInformationIsNeeded = true; + sqlOk = true; + } + + // advanced query "q" + if (qp.getQ()!=null) { + // TODO: it's not clear in spec how this should work + logger.error("'q' filter has not been developed yet in csource discovery!"); + return ""; + } + + // geoquery + if (qp.getGeorel()!=null) { + GeoqueryRel gqr = qp.getGeorel(); + logger.debug("Georel value " + gqr.getGeorelOp()); + try { + sqlWhere = translateNgsildGeoqueryToPostgisQuery(gqr, qp.getGeometry(), qp.getCoordinates(), + qp.getGeoproperty()); + } catch (ResponseException e) { + e.printStackTrace(); + } + fullSqlWhere.append(sqlWhere + " AND "); + sqlOk = true; + } + + if (sqlOk) { + String sqlQuery = "SELECT DISTINCT c.data " + "FROM " + DBConstants.DBTABLE_CSOURCE + " c "; + if (csourceInformationIsNeeded) + sqlQuery += "INNER JOIN " + DBConstants.DBTABLE_CSOURCE_INFO + " ci ON (ci.csource_id = c.id) "; + + if (fullSqlWhere.length() > 0) { + sqlQuery += "WHERE " + fullSqlWhere.toString() + " 1=1 "; + } + // order by ? + return sqlQuery; + } else { + return ""; + } + } + + private String getCommonSqlWhereForTypeIdIdPattern(String typeValue, String idValue, String idPatternValue) { + String sqlWhere = ""; + if (idValue.isEmpty() && idPatternValue.isEmpty()) { // case 1: type only + sqlWhere += getSqlWhereByType(typeValue, false); + } else if (!idValue.isEmpty() && idPatternValue.isEmpty()) { // case 2: type+id + sqlWhere += "("; + sqlWhere += getSqlWhereByType(typeValue, true); + sqlWhere += " OR "; + sqlWhere += getSqlWhereById(typeValue, idValue); + sqlWhere += ")"; + } else if (idValue.isEmpty() && !idPatternValue.isEmpty()) { // case 3: type+idPattern + sqlWhere += "("; + sqlWhere += getSqlWhereByType(typeValue, true); + sqlWhere += " OR "; + sqlWhere += getSqlWhereByIdPattern(typeValue, idPatternValue); + sqlWhere += ")"; + } + return sqlWhere; + } + + private String getSqlWhereByType(String typeValue, boolean includeIdAndIdPatternNullTest) { + String sqlWhere = "("; + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') "; + } + if (includeIdAndIdPatternNullTest) + sqlWhere += "AND ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " IS NULL AND " + + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " IS NULL"; + sqlWhere += ")"; + return sqlWhere; + } + + private String getSqlWhereById(String typeValue, String idValue) { + String sqlWhere = "( "; + + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' AND "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') AND "; + } + + if (idValue.indexOf(",") == -1) { + sqlWhere += "(" + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " = '" + idValue + "' OR " + "'" + + idValue + "' ~ " + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + ")"; + } else { + String[] ids = idValue.split(","); + String whereId = "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " IN ( "; + String whereIdPattern = "("; + for (String id : ids) { + whereId += "'" + id + "',"; + whereIdPattern += "ci." 
+ DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " ~ '" + id + + "' OR "; + } + whereId = StringUtils.chomp(whereId, ","); + whereIdPattern = StringUtils.chomp(whereIdPattern, "OR "); + whereId += ")"; + whereIdPattern += ")"; + + sqlWhere += "(" + whereId + " OR " + whereIdPattern + ")"; + } + + sqlWhere += " )"; + return sqlWhere; + } + + private String getSqlWhereByIdPattern(String typeValue, String idPatternValue) { + String sqlWhere = "( "; + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' AND "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') AND "; + } + sqlWhere += "(" + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " ~ '" + idPatternValue + "' OR " + + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " ~ '" + idPatternValue + "')"; + sqlWhere += " )"; + return sqlWhere; + } + + private String getSqlWhereByAttrsInTypeFiltering(String attrsValue) { + String sqlWhere; + sqlWhere = "( " + + "NOT EXISTS (SELECT 1 FROM csourceinformation ci2 " + + " WHERE ci2.group_id = ci.group_id AND " + + " (ci2.property_id IS NOT NULL OR ci2.relationship_id IS NOT NULL)) " + + "OR " + + "EXISTS (SELECT 1 FROM csourceinformation ci3 " + + " WHERE ci3.group_id = ci.group_id AND " ; + if (attrsValue.indexOf(",") == -1) { + sqlWhere += "(ci3.property_id = '" + attrsValue + "' OR " + + " ci3.relationship_id = '" + attrsValue + "') "; + } else { + sqlWhere += "(ci3.property_id IN ('" + attrsValue.replace(",", "','") + "') OR " + + " ci3.relationship_id IN ('" + attrsValue.replace(",", "','") + "') ) "; + } + sqlWhere += ") )"; + return sqlWhere; + } + + // TODO: SQL input sanitization + // TODO: property of property + // TODO: [SPEC] spec is not clear on how to define a "property of property" in + // the geoproperty field. (probably using dots, but...) + @Override + protected String translateNgsildGeoqueryToPostgisQuery(GeoqueryRel georel, String geometry, String coordinates, + String geoproperty) throws ResponseException { + if (georel.getGeorelOp().isEmpty() || geometry==null || coordinates==null || geometry.isEmpty() || coordinates.isEmpty()) { + logger.error("georel, geometry and coordinates are empty or invalid!"); + throw new ResponseException(ErrorType.BadRequestData, + "georel, geometry and coordinates are empty or invalid!"); + } + + StringBuilder sqlWhere = new StringBuilder(50); + + String georelOp = georel.getGeorelOp(); + logger.debug(" Geoquery term georelOp: " + georelOp); + + String dbColumn = NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO.get(geoproperty); + if (dbColumn == null) { + dbColumn = "ST_SetSRID(ST_GeomFromGeoJSON( c.data#>>'{" + geoproperty + ",0," + + NGSIConstants.JSON_LD_VALUE + "}'), 4326)"; + } else { + dbColumn = "c." 
+ dbColumn; + } + + String referenceValue = "ST_SetSRID(ST_GeomFromGeoJSON('{\"type\": \"" + geometry + "\", \"coordinates\": " + + coordinates + " }'), 4326)"; + + switch (georelOp) { + case NGSIConstants.GEO_REL_WITHIN: + case NGSIConstants.GEO_REL_CONTAINS: + case NGSIConstants.GEO_REL_INTERSECTS: + case NGSIConstants.GEO_REL_EQUALS: + sqlWhere.append(NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING.get(georelOp) + "( " + dbColumn + ", " + + referenceValue + ") "); + break; + case NGSIConstants.GEO_REL_NEAR: + if (georel.getDistanceType()!=null && georel.getDistanceValue()!=null) { + if (georel.getDistanceType().equals(NGSIConstants.GEO_REL_MIN_DISTANCE)) + sqlWhere.append("NOT " + DBConstants.POSTGIS_WITHIN + "( " + dbColumn + ", ST_Buffer(" + referenceValue + + "::geography, " + georel.getDistanceValue() + + ")::geometry ) "); + else + sqlWhere.append(DBConstants.POSTGIS_INTERSECTS + "( " + dbColumn + ", ST_Buffer(" + referenceValue + + "::geography, " + georel.getDistanceValue() + + ")::geometry ) "); + } else { + throw new ResponseException(ErrorType.BadRequestData, + "GeoQuery: Type and distance are required for near relation"); + } + break; + case NGSIConstants.GEO_REL_OVERLAPS: + sqlWhere.append("("); + sqlWhere.append(DBConstants.POSTGIS_OVERLAPS + "( " + dbColumn + ", " + referenceValue + ")"); + sqlWhere.append(" OR "); + sqlWhere.append(DBConstants.POSTGIS_CONTAINS + "( " + dbColumn + ", " + referenceValue + ")"); + sqlWhere.append(")"); + break; + case NGSIConstants.GEO_REL_DISJOINT: + sqlWhere.append("NOT " + DBConstants.POSTGIS_WITHIN + "( " + dbColumn + ", " + referenceValue + ") "); + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Invalid georel operator: " + georelOp); + } + return sqlWhere.toString(); + } + +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..75e4bc94b0151dd8454c431e3ed347562a0ade3e --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.java @@ -0,0 +1,10 @@ +package eu.neclab.ngsildbroker.queryhandler.repository; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Repository; +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository +public class QueryDAO extends StorageReaderDAO { + +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/services/QueryService.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/services/QueryService.java new file mode 100644 index 0000000000000000000000000000000000000000..fb7ab3df4ff3dda2ee3e4839e62628619865a2c2 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/services/QueryService.java @@ -0,0 +1,525 @@ +package eu.neclab.ngsildbroker.queryhandler.services; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.DataOutputStream; +import java.io.IOException; +import java.lang.reflect.Type; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import 
java.util.Set; +import java.util.UUID; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import javax.annotation.PostConstruct; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.header.internals.RecordHeader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.kafka.requestreply.ReplyingKafkaTemplate; +import org.springframework.kafka.requestreply.RequestReplyFuture; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.gson.JsonSyntaxException; +import com.google.gson.reflect.TypeToken; +import com.netflix.discovery.EurekaClient; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.KafkaConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.QueryResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.queryhandler.config.QueryProducerChannel; +import eu.neclab.ngsildbroker.queryhandler.repository.CSourceDAO; +import eu.neclab.ngsildbroker.queryhandler.repository.QueryDAO; + +@Service +public class QueryService { + + private final static Logger logger = LoggerFactory.getLogger(QueryService.class); + +// public static final Gson GSON = DataSerializer.GSON; + + @Value("${entity.topic}") + String ENTITY_TOPIC; + @Value("${entity.keyValues.topic}") + String KVENTITY_TOPIC; + @Value("${entity.withoutSysAttrs.topic}") + String ENTITY_WITHOUT_SYSATTRS_TOPIC; + @Value("${atcontext.url}") + String atContextServerUrl; + + @Autowired + @Qualifier("qmops") + KafkaOps operations; + + @Autowired + ObjectMapper objectMapper; + + @Autowired + @Qualifier("qmconRes") + ContextResolverBasic contextResolver; + + @Value("${query.topic}") + String requestTopic; + + @Value("${query.result.topic}") + String queryResultTopic; + + @Value("${csource.query.topic}") + String csourceQueryTopic; + + @Value("${kafka.replytimeout}") + long replyTimeout = 5000; + + @Value("${maxLimit}") + int maxLimit = 500; + + @Autowired + QueryDAO queryDAO; + + @Autowired + @Qualifier("qmcsourcedao") + CSourceDAO cSourceDAO; + + @Value("${directDbConnection}") + boolean 
directDbConnection; + + @SuppressWarnings("unused") + + @Autowired + private EurekaClient eurekaClient; + + @Autowired + ReplyingKafkaTemplate kafkaTemplate; + + @Autowired + @Qualifier("qmrestTemp") + RestTemplate restTemplate; + + private QueryProducerChannel producerChannels; + + public QueryService(QueryProducerChannel producerChannels) { + + this.producerChannels = producerChannels; + } + + @PostConstruct + private void setup() { + kafkaTemplate.setReplyTimeout(replyTimeout); + } + + /** + * Method is used for get entity based on entity id or attributes + * + * @param entityId + * @param attrs + * @return String + * @throws ResponseException + * @throws IOException + */ + public String retrieveEntity(String entityId, List attrs, boolean keyValues, boolean includeSysAttrs) + throws ResponseException, IOException { + + logger.trace("call retriveEntity method in QueryService class"); + // null id check + /* + * if (entityId == null) throw new ResponseException(ErrorType.BadRequestData); + */ + boolean checkData = entityId.contains("="); + if (checkData) { + throw new ResponseException(ErrorType.BadRequestData); + } + // get entity from ENTITY topic. + byte[] entityJson; + if (keyValues) { + entityJson = operations.getMessage(entityId, this.KVENTITY_TOPIC); + } else { + if (includeSysAttrs) + entityJson = operations.getMessage(entityId, this.ENTITY_TOPIC); + else + entityJson = operations.getMessage(entityId, this.ENTITY_WITHOUT_SYSATTRS_TOPIC); + } + // check whether exists. + if (entityJson == null) + throw new ResponseException(ErrorType.NotFound); + + JsonNode entityJsonBody = objectMapper.createObjectNode(); + if (attrs != null && !attrs.isEmpty()) { + JsonNode entityChildJsonBody = objectMapper.createObjectNode(); + entityChildJsonBody = objectMapper.readTree(entityJson).get(NGSIConstants.JSON_LD_ID); + ((ObjectNode) entityJsonBody).set(NGSIConstants.JSON_LD_ID, entityChildJsonBody); + entityChildJsonBody = objectMapper.readTree(entityJson).get(NGSIConstants.JSON_LD_TYPE); + ((ObjectNode) entityJsonBody).set(NGSIConstants.JSON_LD_TYPE, entityChildJsonBody); + + if (includeSysAttrs) { + entityChildJsonBody = objectMapper.readTree(entityJson).get(NGSIConstants.NGSI_LD_CREATED_AT); + ((ObjectNode) entityJsonBody).set(NGSIConstants.NGSI_LD_CREATED_AT, entityChildJsonBody); + entityChildJsonBody = objectMapper.readTree(entityJson).get(NGSIConstants.NGSI_LD_MODIFIED_AT); + ((ObjectNode) entityJsonBody).set(NGSIConstants.NGSI_LD_MODIFIED_AT, entityChildJsonBody); + } + + for (int i = 0; i < attrs.size(); i++) { + entityChildJsonBody = objectMapper.readTree(entityJson).get(attrs.get(i)); + ((ObjectNode) entityJsonBody).set(attrs.get(i), entityChildJsonBody); + } + } else { + entityJsonBody = objectMapper.readTree(entityJson); + } + if (keyValues && !includeSysAttrs) { // manually remove createdAt and modifiedAt at root level + ObjectNode objectNode = (ObjectNode) entityJsonBody; + objectNode.remove(NGSIConstants.NGSI_LD_CREATED_AT); + objectNode.remove(NGSIConstants.NGSI_LD_MODIFIED_AT); + logger.debug("sysattrs removed"); + } + + return entityJsonBody.toString(); + } + + /** + * Method is used for get all entity operation + * + * @return List + * @throws ResponseException + * @throws IOException + */ + public ArrayList retriveAllEntity() throws ResponseException, IOException { + logger.trace("retriveAllEntity() in QueryService class :: started"); + byte[] entity = null; + Map records = operations.pullFromKafka(this.ENTITY_TOPIC); + ArrayList result = new ArrayList(); + if 
(records.isEmpty()) { + result.add("[]"); + } else { + + StringBuilder resultString = new StringBuilder("["); + + for (String recordKey : records.keySet()) { + entity = records.get(recordKey); + if (Arrays.equals(entity, AppConstants.NULL_BYTES)) { + continue; + } + + resultString.append(new String(entity)); + resultString.append(","); + } + logger.trace("retriveAllEntity() in QueryService class :: completed"); + + if (resultString.length() == 1) // it has only the first square bracket, no entities + resultString.append("]"); + else + resultString.setCharAt(resultString.length() - 1, ']'); + + result.add(resultString.toString()); + result.addAll(records.keySet()); + } + return result; + } + + /** + * Sends the query to the storage manager over Kafka and waits synchronously + * for the reply. + * + * @param storageManagerQuery + * @return List + * @throws Exception + */ + public List getFromStorageManager(String storageManagerQuery) throws Exception { + // create producer record + logger.info("getFromStorageManager() :: started"); + ProducerRecord record = new ProducerRecord(requestTopic, + storageManagerQuery.getBytes()); + // set reply topic in header + record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, queryResultTopic.getBytes())); + RequestReplyFuture sendAndReceive = kafkaTemplate.sendAndReceive(record); + // get consumer record + ConsumerRecord consumerRecord = sendAndReceive.get(); + // read from byte array + ByteArrayInputStream bais = new ByteArrayInputStream(consumerRecord.value()); + DataInputStream in = new DataInputStream(bais); + List entityList = new ArrayList(); + while (in.available() > 0) { + entityList.add(in.readUTF()); + } + // return consumer value + logger.info("getFromStorageManager() :: completed"); + return entityList; + } + + /** + * Sends the query to the context registry over Kafka and waits synchronously + * for the reply. + * + * @param contextRegistryQuery + * @return List + * @throws Exception + */ + public List getFromContextRegistry(String contextRegistryQuery) throws Exception { + // create producer record + String contextRegistryData = null; + logger.info("getFromContextRegistry() :: started"); + logger.info("CSourceQuery Topic is::" + csourceQueryTopic); + ProducerRecord record = new ProducerRecord(csourceQueryTopic, + contextRegistryQuery.getBytes()); + // set reply topic in header + record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, queryResultTopic.getBytes())) + .add(KafkaHeaders.MESSAGE_KEY, "dummy".getBytes());// change with some useful key + RequestReplyFuture sendAndReceive = kafkaTemplate.sendAndReceive(record); + // get consumer record + ConsumerRecord consumerRecord = sendAndReceive.get(); + // return consumer value + logger.info("getFromContextRegistry() :: completed"); + contextRegistryData = new String((byte[]) consumerRecord.value()); + logger.info("getFromContextRegistry() data broker list::" + contextRegistryData); + return DataSerializer.getStringList(contextRegistryData); + } + + /** + * Queries the storage manager and, in parallel, the registry manager to + * discover csources holding matching entities. + * + * @param qp + * @param rawQueryString + * @param linkHeaders + * @param limit + * @param offset + * @param qToken + * @param showServices + * @return QueryResult + * @throws ExecutionException + * @throws InterruptedException + * @throws IOException + * @throws ResponseException + * @throws URISyntaxException + * @throws Exception + */ + public QueryResult getData(QueryParams qp, String rawQueryString, List linkHeaders, Integer limit, + Integer offset,
String qToken, Boolean showServices) throws ResponseException, Exception { + + logger.info("STRATOS IM HERE"); + + List aggregatedResult = new ArrayList(); + QueryResult result = new QueryResult(null, null, ErrorType.None, -1, true); + List realResult; + qp.setLimit(limit); + qp.setOffSet(offset); + int dataLeft = 0; + if (qToken == null) { + ExecutorService executorService = Executors.newFixedThreadPool(2); + + Future> futureStorageManager = executorService.submit(new Callable>() { + public List call() throws Exception { + logger.trace("Asynchronous Callable storage manager"); + //TAKE CARE OF PAGINATION HERE + if (queryDAO != null) { + return queryDAO.query(qp); + } else { + return getFromStorageManager(DataSerializer.toJson(qp)); + } + } + }); + + Future> futureContextRegistry = executorService.submit(new Callable>() { + public List call() throws Exception { + try { + List fromCsources = new ArrayList(); + logger.info("STRATOS IM HERE 2"); + logger.trace("Asynchronous 1 context registry"); + List brokerList; + if (cSourceDAO != null) { + brokerList = cSourceDAO.queryExternalCsources(qp); + } else { + brokerList = getFromContextRegistry(DataSerializer.toJson(qp)); + } + Pattern p = Pattern.compile(NGSIConstants.NGSI_LD_ENDPOINT_REGEX); + Matcher m; + Set> callablesCollection = new HashSet>(); + for (String brokerInfo : brokerList) { + m = p.matcher(brokerInfo); + m.find(); + String uri = m.group(1); + logger.debug("url " + uri.toString() + "/ngsi-ld/v1/entities/?" + rawQueryString); + logger.info("STRATOS url " + uri.toString() + "/ngsi-ld/v1/entities/?" + rawQueryString); + Callable callable = () -> { + HttpHeaders headers = new HttpHeaders(); + for (Object link : linkHeaders) { + headers.add("Link", "<" + link.toString() + + ">; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\""); + } + + HttpEntity entity = new HttpEntity<>(headers); + + String result = restTemplate.exchange(uri + "/ngsi-ld/v1/entities/?" + rawQueryString, + HttpMethod.GET, entity, String.class).getBody(); + + logger.debug("http call result :: ::" + result); + return result; + }; + callablesCollection.add(callable); + + } + fromCsources = getDataFromCsources(callablesCollection); + logger.debug("csource call response :: "); + // fromCsources.forEach(e -> logger.debug(e)); + + return fromCsources; + } catch (Exception e) { + e.printStackTrace(); + logger.error( + "No reply from registry. 
Looks like you are running without a context source registry."); + logger.error(e.getMessage()); + return null; + } + } + }); + + // Csources response + + executorService.shutdown(); + + // storage response + logger.trace("storage task status completed :: " + futureStorageManager.isDone()); + List fromStorage = (List) futureStorageManager.get(); + List fromCsources = (List) futureContextRegistry.get(); + // logger.trace("response from storage :: "); + // fromStorage.forEach(e -> logger.debug(e)); + + aggregatedResult.addAll(fromStorage); + if (fromCsources != null) { + aggregatedResult.addAll(fromCsources); + } + // logger.trace("aggregated"); + // aggregatedResult.forEach(e -> logger.debug(e)); + /* + * if (aggregatedResult.size() > limit) { qToken = generateToken(); String + * writeToken = qToken; int end = offset + limit; if (end > + * aggregatedResult.size()) { end = aggregatedResult.size(); } realResult = + * aggregatedResult.subList(offset, end); dataLeft = aggregatedResult.size() - + * end; new Thread() { public void run() { try { + * writeFullResultToKafka(writeToken, aggregatedResult); } catch (IOException e) + * { + * + * } catch (ResponseException e) { + * + * } }; }.start(); } else { + */ + realResult = aggregatedResult; + //} + } else { + // read from byte array + byte[] data = operations.getMessage(qToken, KafkaConstants.PAGINATION_TOPIC); + if (data == null) { + throw new ResponseException(ErrorType.BadRequestData, + "The provided qtoken is not valid. Provide a valid qtoken or remove the parameter to start a new query"); + } + ByteArrayInputStream bais = new ByteArrayInputStream(data); + DataInputStream in = new DataInputStream(bais); + while (in.available() > 0) { + aggregatedResult.add(in.readUTF()); + } + int end = offset + limit; + if (end > aggregatedResult.size()) { + end = aggregatedResult.size(); + } + realResult = aggregatedResult.subList(offset, end); + dataLeft = aggregatedResult.size() - end; + + } + result.setDataString(realResult); + result.setqToken(qToken); + result.setLimit(limit); + result.setOffset(offset); + result.setResultsLeftAfter(dataLeft); + result.setResultsLeftBefore(offset); + return result; + } + + private void writeFullResultToKafka(String qToken, List aggregatedResult) + throws IOException, ResponseException { + // write to byte array + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream out = new DataOutputStream(baos); + for (String element : aggregatedResult) { + out.writeUTF(element); + } + operations.pushToKafka(producerChannels.paginationWriteChannel(), qToken.getBytes(), baos.toByteArray()); + } + + private String generateToken() { + return UUID.randomUUID().toString(); + } + + /** + * making http call to all discovered csources async. + * + * @param endpointsList + * @param query + * @return List + * @throws InterruptedException + * @throws ExecutionException + * @throws URISyntaxException + * @throws ResponseException + * @throws IOException + */ + private List getDataFromCsources(Set> callablesCollection) + throws ResponseException, Exception { + List allDiscoveredEntities = new ArrayList(); + ExecutorService executorService = Executors.newFixedThreadPool(2); + List> futures = executorService.invokeAll(callablesCollection); + // TODO: why sleep? 
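+ // Note: ExecutorService.invokeAll() blocks until all submitted callables have completed, so an additional sleep here should not be necessary.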
+ // Thread.sleep(5000); + for (Future future : futures) { + logger.trace("future.isDone = " + future.isDone()); + List entitiesList = new ArrayList(); + try { + String response = (String) future.get(); + logger.debug("response from invoke all ::" + response); + if (response != null && !("[]").equals(response)) { + JsonNode jsonNode = objectMapper.readTree(response); + for (int i = 0; i < jsonNode.size(); i++) { + if (jsonNode.get(i) != null && !jsonNode.get(i).isNull()) { + String payload = contextResolver.expand(jsonNode.get(i).toString(), null, true, AppConstants.ENTITIES_URL_ID);// , linkHeaders); + entitiesList.add(payload); + } + } + } + } catch (JsonSyntaxException | ExecutionException e) { + logger.error("Exception ::", e); + } + allDiscoveredEntities.addAll(entitiesList); + } + executorService.shutdown(); + logger.trace("getDataFromCsources() completed ::"); + return allDiscoveredEntities; + } +} diff --git a/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/utils/Validator.java b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/utils/Validator.java new file mode 100644 index 0000000000000000000000000000000000000000..3e60b4b5696e29770129407ba8537afc7df5a667 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/utils/Validator.java @@ -0,0 +1,51 @@ +package eu.neclab.ngsildbroker.queryhandler.utils; + +import java.util.HashSet; +import java.util.Map; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +public class Validator { + + private static HashSet validParams = new HashSet(); + static { + validParams.add(NGSIConstants.QUERY_PARAMETER_TYPE); + validParams.add(NGSIConstants.QUERY_PARAMETER_ID); + validParams.add(NGSIConstants.QUERY_PARAMETER_IDPATTERN); + validParams.add(NGSIConstants.QUERY_PARAMETER_ATTRS); + validParams.add(NGSIConstants.QUERY_PARAMETER_QUERY); + validParams.add(NGSIConstants.QUERY_PARAMETER_GEOREL); + validParams.add(NGSIConstants.QUERY_PARAMETER_GEOMETRY); + validParams.add(NGSIConstants.QUERY_PARAMETER_COORDINATES); + validParams.add(NGSIConstants.QUERY_PARAMETER_GEOPROPERTY); + validParams.add(NGSIConstants.QUERY_PARAMETER_TIMEPROPERTY); + validParams.add(NGSIConstants.QUERY_PARAMETER_OFFSET); + validParams.add(NGSIConstants.QUERY_PARAMETER_LIMIT); + validParams.add(NGSIConstants.QUERY_PARAMETER_QTOKEN); + validParams.add(NGSIConstants.QUERY_PARAMETER_OPTIONS); + } + + + + + public static void validate(Map parameterMap, int maxLimit, boolean ignoreType) throws ResponseException{ + + if(!ignoreType && !parameterMap.containsKey(NGSIConstants.QUERY_PARAMETER_TYPE) && !parameterMap.containsKey(NGSIConstants.QUERY_PARAMETER_ATTRS)) { + throw new ResponseException(ErrorType.BadRequestData, "Missing mandatory minimum parameter " + NGSIConstants.QUERY_PARAMETER_TYPE + " or " + NGSIConstants.QUERY_PARAMETER_ATTRS); + } + for (String key : parameterMap.keySet()) { + if(!validParams.contains(key)) { + throw new ResponseException(ErrorType.BadRequestData, key + " is not a valid parameter"); + } + if(key.equals(NGSIConstants.QUERY_PARAMETER_LIMIT)) { + int value = Integer.parseInt(parameterMap.get(key)[0]); + if(value > maxLimit) { + throw new ResponseException(ErrorType.TooManyResults, "The limit in the request is too big.
To request with the max limit of " + maxLimit + " remove the limit parameter"); + } + } + } + + } +} diff --git a/scorpio-broker/Core/QueryManager/src/main/resources/application-aaio.yml b/scorpio-broker/Core/QueryManager/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..1bdfede149b326d903dcaa9412dbb5749f9a3dbb --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/resources/application-aaio.yml @@ -0,0 +1,31 @@ +server: + port: 1026 + +bootstrap: + servers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +directDbConnection: true +#if directDbConnection is false then properties below are not used {{{ +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_querymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 +#}}} \ No newline at end of file diff --git a/scorpio-broker/Core/QueryManager/src/main/resources/application-aio.yml b/scorpio-broker/Core/QueryManager/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..87252eba9a8192ed522638889f4943c8535e895c --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/resources/application-aio.yml @@ -0,0 +1,31 @@ +server: + port: 1026 + +bootstrap: + servers: localhost:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +directDbConnection: true +#if directDbConnection is false then properties below are not used {{{ +spring: + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + datasource: + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_querymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 +#}}} \ No newline at end of file diff --git a/scorpio-broker/Core/QueryManager/src/main/resources/application-dist.yml b/scorpio-broker/Core/QueryManager/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..12cb53e8ffa1211848b7e5c3b2a004a2c4fcb527 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/resources/application-dist.yml @@ -0,0 +1,31 @@ +server: + port: 1026 + +bootstrap: + servers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ + +directDbConnection: true +#if directDbConnection is false then properties below are not used {{{ +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_querymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 +#}}} \ No newline at end of file diff --git a/scorpio-broker/Core/QueryManager/src/main/resources/application.yml b/scorpio-broker/Core/QueryManager/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..8ac9daf36fac2ce899132d0abaaab98748de736c --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/resources/application.yml @@ -0,0 +1,83 @@ +spring: + application: + name: query-manager + main: + lazy-initialization: true + kafka: + admin: 
+ properties: + cleanup: + policy: compact + flyway: + baselineOnMigrate: true + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + datasource: + url: "jdbc:postgresql://127.0.0.1:5432/ngb?ApplicationName=ngb_querymanager" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + +server: + port: 1026 + tomcat: + max: + threads: 50 +#Entity-Manager properties +entity: + topic: ENTITY + keyValues: + topic: KVENTITY + withoutSysAttrs: + topic: ENTITY_WITHOUT_SYSATTRS +query: + topic: QUERY + result: + topic: QUERY_RESULT + +bootstrap: + servers: localhost:9092 +#enable log compaction + + +csource: + query: + topic: CONTEXT_REGISTRY_QUERY +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ +ngb: + debugmode: false +kafka: + replytimeout: 10000 + +defaultLimit: 50 +maxLimit: 500 +directDbConnection: true +#if directDbConnection is false then properties below are not used {{{ + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/QueryManager/src/main/resources/log4j2-spring.xml b/scorpio-broker/Core/QueryManager/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..d58eb5aefcf63d20872602a28c985432800b0bd4 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/QueryHandlerTest.java b/scorpio-broker/Core/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/QueryHandlerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..3ffe0c722f08bf91562518fae6cf6ad480363165 --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/QueryHandlerTest.java @@ -0,0 +1,13 @@ +package eu.neclab.ngsildbroker.queryhandler; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; +@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +public class QueryHandlerTest { + @Test + public void contextLoads() { + } +} diff --git a/scorpio-broker/Core/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.java b/scorpio-broker/Core/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..fd5d0165c6c983035dcd1ba45b77600eb820eb8a --- /dev/null +++ b/scorpio-broker/Core/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.java @@ -0,0 +1,477 @@ +package eu.neclab.ngsildbroker.queryhandler.controller; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import 
static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.powermock.core.classloader.annotations.PowerMockIgnore; +import org.powermock.modules.junit4.PowerMockRunner; +import org.powermock.modules.junit4.PowerMockRunnerDelegate; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.QueryResult; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.queryhandler.services.QueryService; + +@SpringBootTest(properties = { "spring.main.allow-bean-definition-overriding=true" }) +@RunWith(PowerMockRunner.class) +//@WebMvcTest(secure = false) +@AutoConfigureMockMvc(secure = false) +@PowerMockRunnerDelegate(SpringRunner.class) +@PowerMockIgnore({ "javax.management.*", "com.sun.org.apache.xerces.*", "javax.xml.*", "org.xml.*", "org.w3c.*", + "com.sun.org.apache.xalan.*", "javax.activation.*", "javax.net.*", "javax.security.*" }) +public class QueryControllerTest { + + @Autowired + private MockMvc mockMvc; + @MockBean + private QueryService queryService; + @Autowired + ContextResolverBasic contextResolver; + @Autowired + ParamsResolver paramsResolver; +// @InjectMocks +// @Spy +// QueryController qc; + + private String entity; + private String response = ""; + private String linkHeader = "<; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\">"; + private List entities; + + @Before + public void setup() { + // PowerMockito.mockStatic(HttpUtils.class); + MockitoAnnotations.initMocks(this); +// this.mockMvc = MockMvcBuilders.standaloneSetup(qc).build(); + //@formatter:off + entity="[{\r\n" + + " \"http://example.org/vehicle/brandName\": [{\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"Mercedes\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/createdAt\": [{\r\n" + + " \"@type\": \"https://uri.etsi.org/ngsi-ld/DateTime\",\r\n" + + " \"@value\": \"2017-07-29T12:00:04Z\"\r\n" + + " }],\r\n" + + " \"@id\": \"urn:ngsi-ld:Vehicle:A100\",\r\n" + + 
" \"http://example.org/common/isParked\": [{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasObject\": [{\r\n" + + " \"@id\": \"urn:ngsi-ld:OffStreetParking:Downtown1\"\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/observedAt\": [{\r\n" + + " \"@type\": \"https://uri.etsi.org/ngsi-ld/DateTime\",\r\n" + + " \"@value\": \"2017-07-29T12:00:04Z\"\r\n" + + " }],\r\n" + + " \"http://example.org/common/providedBy\": [{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasObject\": [{\r\n" + + " \"@id\": \"urn:ngsi-ld:Person:Bob\"\r\n" + + " }],\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Relationship\"]\r\n" + + " }],\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Relationship\"]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/location\": [{\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/GeoProperty\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": \"{ \\\"type\\\":\\\"Point\\\", \\\"coordinates\\\":[ -8.5, 41.2 ] }\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"http://example.org/vehicle/speed\": [{\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Property\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\": [{\r\n" + + " \"@value\": 80\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"@type\": [\"http://example.org/vehicle/Vehicle\"]\r\n" + + "}]"; + + response="{\r\n" + + " \"id\": \"urn:ngsi-ld:Vehicle:A100\",\r\n" + + " \"type\": \"Vehicle\",\r\n" + + " \"brandName\": {\r\n" + + " \"type\": \"Property\",\r\n" + + " \"value\": \"Mercedes\"\r\n" + + " },\r\n" + + " \"isParked\": {\r\n" + + " \"type\": \"Relationship\",\r\n" + + " \"object\": \"urn:ngsi-ld:OffStreetParking:Downtown1\",\r\n" + + " \"observedAt\": \"2017-07-29T12:00:04Z\",\r\n" + + " \"providedBy\": {\r\n" + + " \"type\": \"Relationship\",\r\n" + + " \"object\": \"urn:ngsi-ld:Person:Bob\"\r\n" + + " }\r\n" + + " },\r\n" + + " \"speed\": {\r\n" + + " \"type\": \"Property\",\r\n" + + " \"value\": 80\r\n" + + " },\r\n" + + " \"createdAt\": \"2017-07-29T12:00:04Z\",\r\n" + + " \"location\": {\r\n" + + " \"type\": \"GeoProperty\",\r\n" + + " \"value\": { \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }\r\n" + + " }\r\n" + + "}"; + + entities=new ArrayList( + Arrays.asList("{\r\n" + + " \"http://example.org/vehicle/brandName\" : [ {\r\n" + + " \"@value\" : \"Volvo\"\r\n" + + " } ],\r\n" + + " \"@id\" : \"urn:ngsi-ld:Vehicle:A100\",\r\n" + + " \"http://example.org/vehicle/speed\" : [ {\r\n" + + " \"https://uri.etsi.org/ngsi-ld/instanceId\" : [ {\r\n" + + " \"@value\" : \"be664aaf-a7af-4a99-bebc-e89528238abf\"\r\n" + + " } ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/observedAt\" : [ {\r\n" + + " \"@value\" : \"2018-06-01T12:03:00Z\",\r\n" + + " \"@type\" : \"https://uri.etsi.org/ngsi-ld/DateTime\"\r\n" + + " } ],\r\n" + + " \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/Property\" ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\" : [ {\r\n" + + " \"@value\" : \"120\"\r\n" + + " } ]\r\n" + + " }, {\r\n" + + " \"https://uri.etsi.org/ngsi-ld/instanceId\" : [ {\r\n" + + " \"@value\" : \"d3ac28df-977f-4151-a432-dc088f7400d7\"\r\n" + + " } ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/observedAt\" : [ {\r\n" + + " \"@value\" : \"2018-08-01T12:05:00Z\",\r\n" + + " \"@type\" : \"https://uri.etsi.org/ngsi-ld/DateTime\"\r\n" + + " } ],\r\n" + + " \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/Property\" ],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/hasValue\" : [ {\r\n" + + " \"@value\" : \"80\"\r\n" + + " } ]\r\n" + + " } ],\r\n" + + " \"@type\" : [ 
\"http://example.org/vehicle/Vehicle\" ]\r\n" + + "}")); + //@formatter:on + } + + @After + public void tearDown() { + entity = null; + response = null; + } + + @Test + public void getEntityTest() throws Exception { + try { + + ResponseEntity responseEntity = ResponseEntity.status(HttpStatus.OK).header("location", + "<; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\">") + .body(response); + + Mockito.doReturn(entity).when(queryService).retrieveEntity(any(String.class), any(List.class), + any(boolean.class), any(boolean.class)); +// Mockito.doReturn(responseEntity).when(qc).generateReply(any(), any(), any()); + QueryResult result = new QueryResult(entities, null, ErrorType.None, -1, true); + Mockito.doReturn(result).when(queryService).getData(any(), any(), any(), any(), any(), any(), any()); + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A100").accept(AppConstants.NGB_APPLICATION_JSON)) + .andExpect(status().isOk()).andExpect(jsonPath("$.id").value("urn:ngsi-ld:Vehicle:A100")); +// .andExpect(redirectedUrl(linkHeader)).andDo(print()); + verify(queryService, times(1)).getData(any(), any(), any(), any(), any(), any(), any()); +// verify(qc, times(1)).generateReply(any(), any(), any()); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + + } + + @Test() + public void getEntityNotFoundTest() { + try { + + Mockito.doReturn("null").when(queryService).retrieveEntity(any(String.class), any(List.class), + any(boolean.class), any(boolean.class)); + //QueryResult result = new QueryResult(entities, null, ErrorType.None, -1, true); + Mockito.doThrow(new ResponseException(ErrorType.NotFound)).when(queryService).getData(any(), any(), any(), any(), any(), any(), any()); + + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A100").accept(AppConstants.NGB_APPLICATION_JSON)) + .andExpect(status().isNotFound()).andExpect(jsonPath("$.title").value("Resource not found.")) + .andDo(print()); + verify(queryService, times(1)).getData(any(), any(), any(), any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + @Test() + public void getAttrsTest() { + try { + //QueryController qc=Mockito.mock(QueryController.class); + Set linkHeaders = new HashSet(); + Map entityContext = new HashMap(); + String jsonLdResolved = "{\r\n" + " \"http://example.org/vehicle/brandName\": [{\r\n" + + " \"@value\": 0\r\n" + " }]\r\n" + "}"; + + ResponseEntity responseEntity = ResponseEntity.status(HttpStatus.OK).header("location", + "<; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\">") + .body(response); + linkHeaders.add("http://localhost:9090/ngsi-ld/contextes/urn:ngsi-ld:Vehicle:A100"); + entityContext.put("urn:ngsi-ld:Vehicle:A100", "{\"brandName\":\"http://example.org/vehicle/brandName\"}"); + + // when(queryService.retrieveEntity(any(),any(),any(),any())).thenReturn(entity); + // when(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)).thenReturn(linkHeaders); + + // when(contextResolver.getContext(any())).thenReturn(entityContext); + // when(contextResolver.expandPayload(any())).thenReturn(jsonLdResolved); + // when(HttpUtils.generateReply(any(), any(), any(), any(), any(), + // any())).thenReturn(responseEntity); + + Mockito.doReturn(entity).when(queryService).retrieveEntity(any(String.class), any(List.class), + any(boolean.class), any(boolean.class)); +// 
Mockito.doReturn(entityContext).when(contextResolver).getContext(any()); +// Mockito.doReturn(responseEntity).when(qc).generateReply(any(), any(), any()); + QueryResult result = new QueryResult(entities, null, ErrorType.None, -1, true); + Mockito.doReturn(result).when(queryService).getData(any(), any(), any(), any(), any(), any(), any()); + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}?attrs=brandName", "urn:ngsi-ld:Vehicle:A100") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isOk()) + .andExpect(jsonPath("$.id").value("urn:ngsi-ld:Vehicle:A100")).andDo(print()); + // verify(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)); + + Mockito.verify(queryService, times(1)).getData(any(), any(), any(), any(), any(), any(), any()); +// Mockito.verify(qc, times(1)).generateReply(any(), any(), any()); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + @Test() + public void getAttrsFailureTest() { + try { + // when(queryService.retrieveEntity(any(),any(),any(),any())).thenReturn(entity); + //Mockito.doReturn("null").when(queryService).retrieveEntity(any(String.class), any(List.class), + // any(boolean.class), any(boolean.class)); + //QueryResult result = new QueryResult(entities, null, ErrorType.None, -1, true); + Mockito.doThrow(new ResponseException(ErrorType.NotFound)).when(queryService).getData(any(), any(), any(), any(), any(), any(), any()); + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}?attrs=brandName", "urn:ngsi-ld:Vehicle:A100") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isNotFound()) + .andExpect(jsonPath("$.title").value("Resource not found.")).andDo(print()); + // verify(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)); + // Mockito.verify(queryService,times(1)).retrieveEntity(any(String.class), + // any(List.class), any(boolean.class), any(boolean.class)); + verify(queryService, times(1)).getData(any(), any(), any(), any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + @Test() + public void serviceThrowsAlreadyExistsTest() { + ResponseException responseException = new ResponseException(ErrorType.BadRequestData); + + try { + Mockito.doThrow(responseException).when(queryService).retrieveEntity(any(String.class), any(List.class), + any(boolean.class), any(boolean.class)); + Mockito.doThrow(new ResponseException(ErrorType.BadRequestData)).when(queryService).getData(any(), any(), any(), any(), any(), any(), any()); + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}", "urn:ngsi-ld:Vehicle:A100").accept(AppConstants.NGB_APPLICATION_JSON)) + .andExpect(status().isBadRequest()).andExpect(jsonPath("$.title").value("Bad Request Data.")) + .andDo(print()); + verify(queryService, times(1)).getData(any(), any(), any(), any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + + } + + //@Test TODO : Failing because of request URI check in controller class. 
+ public void getAttribOfEntityTest() { + try { + Set linkHeaders = new HashSet(); + Map entityContext = new HashMap(); + linkHeaders.add("http://localhost:9090/ngsi-ld/contextes/urn:ngsi-ld:Vehicle:A100"); + entityContext.put("urn:ngsi-ld:Vehicle:A100", "{\"brandName\":\"http://example.org/vehicle/brandName\"}"); + String resolveQueryLdContext = "http://example.org/vehicle/brandName"; + String response = "{\r\n" + " \"id\": \"urn:ngsi-ld:Vehicle:A100\",\r\n" + " \"type\": \"Vehicle\",\r\n" + + " \"brandName\": {\r\n" + " \"type\": \"Property\",\r\n" + + " \"value\": \"Mercedes\"\r\n" + " }\r\n" + "}\r\n" + "\r\n" + ""; + ResponseEntity responseEntity = ResponseEntity.status(HttpStatus.OK).header("location", + "<; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\">") + .body(response); + + HttpServletRequest request = Mockito.mock(HttpServletRequest.class); + String res = "/"; + // when(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)).thenReturn(linkHeaders); + // when(contextResolver.getContext(any())).thenReturn(entityContext); + // when(paramsResolver.resolveQueryLdContext(any(), + // any())).thenReturn(resolveQueryLdContext); + // when(queryService.retrieveEntity(any(),any(),any(),any())).thenReturn(entity); + // when(HttpUtils.generateReply(any(), any(), any(), any(), any(), + // any())).thenReturn(responseEntity); + +// Mockito.doReturn(entityContext).when(contextResolver).getContext(any()); + Mockito.doReturn(resolveQueryLdContext).when(paramsResolver).expandAttribute(any(), any()); + Mockito.doReturn(entity).when(queryService).retrieveEntity(any(String.class), any(List.class), + any(boolean.class), any(boolean.class)); + // Mockito.doReturn(res).when(request).getRequestURI(); + Mockito.when(request.getRequestURI()).thenReturn(res); +// Mockito.doReturn(responseEntity).when(qc).generateReply(any(), any()); + + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}/attrs/{attrsId}", "urn:ngsi-ld:Vehicle:A100", "brandName") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isOk()) + .andExpect(jsonPath("$.id").value("urn:ngsi-ld:Vehicle:A100")).andDo(print()); + + // verify(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)); +// verify(contextResolver, times(1)).getContext(any()); + verify(paramsResolver, times(1)).expandAttribute(any(), any()); + verify(queryService, times(1)).retrieveEntity(any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + //@Test + public void badRequestForAttribOfEntityTest() { + try { + Set linkHeaders = new HashSet(); + Map entityContext = new HashMap(); + linkHeaders.add("http://localhost:9090/ngsi-ld/contextes/urn:ngsi-ld:Vehicle:A100"); + entityContext.put("urn:ngsi-ld:Vehicle:A100", "{\"brandName\":\"http://example.org/vehicle/brandName\"}"); + String resolveQueryLdContext = "http://example.org/vehicle/brandName"; + + // when(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)).thenReturn(linkHeaders); + // when(contextResolver.getContext(any())).thenReturn(entityContext); + // when(paramsResolver.resolveQueryLdContext(any(), + // any())).thenReturn(resolveQueryLdContext); + // when(queryService.retrieveEntity(any(),any(),any(),any())).thenThrow(new + // ResponseException(ErrorType.BadRequestData)); + +// Mockito.doReturn(entityContext).when(contextResolver).getContext(any()); +// 
Mockito.doReturn(resolveQueryLdContext).when(paramsResolver).resolveQueryLdContext(any(), any()); + Mockito.doThrow(new ResponseException(ErrorType.BadRequestData)).when(queryService) + .retrieveEntity(any(String.class), any(List.class), any(boolean.class), any(boolean.class)); + + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}/attrs/{attrsId}", "urn%3Angsi-ld%3AVehicle%3AA100", "brandName") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.title").value("Bad Request Data.")).andDo(print()); + + // verify(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)); + // verify(contextResolver,times(1)).getContext(any()); + // verify(paramsResolver,times(1)).resolveQueryLdContext(any(), any()); + // verify(queryService,times(1)).retrieveEntity(any(String.class), + // any(List.class), any(boolean.class), any(boolean.class)); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + // @Test TODO : Failing because of request URI check in controller class. + public void exception500ForAttribOfEntityTest() { + try { + // when(queryService.retrieveEntity(any(),any(),any(),any())).thenThrow(new + // ResponseException(ErrorType.InternalError)); + + Mockito.doThrow(new ResponseException(ErrorType.InternalError)).when(queryService) + .retrieveEntity(any(String.class), any(List.class), any(boolean.class), any(boolean.class)); + + mockMvc.perform(get("/ngsi-ld/v1/entities/{entityId}/attrs/{attrsId}/", "urn:ngsi-ld:Vehicle:A100", "brandName") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isInternalServerError()) + .andExpect(jsonPath("$.title").value("Internal error.")).andDo(print()); + + // verify(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)); + // verify(contextResolver,times(1)).getContext(any()); + // verify(paramsResolver,times(1)).resolveQueryLdContext(any(), any()); + // verify(queryService,times(1)).retrieveEntity(any(),any(), any(),any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + @Test + public void getAllEntityBadRequestTest() { + try { + mockMvc.perform(get("/ngsi-ld/v1/entities/").accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isBadRequest()) + .andExpect(jsonPath("$.title").value("Bad Request Data.")).andDo(print()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + @Test + public void getAllEntitySuccessTest() { + try { + String resolveQueryLdContext = "http://example.org/vehicle/brandName"; + Set linkHeaders = new HashSet(); + linkHeaders.add("http://localhost:9090/ngsi-ld/contextes/urn:ngsi-ld:Vehicle:A100"); + ResponseEntity responseEntity = ResponseEntity.status(HttpStatus.OK).header("location", + "<; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\">") + .body(response); + + QueryResult result = new QueryResult(entities, null, ErrorType.None, -1, true); + // when(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)).thenReturn(linkHeaders); + // when(paramsResolver.resolveQueryLdContext(any(), + // any())).thenReturn(resolveQueryLdContext); + // when(paramsResolver.getQueryParamsFromUriQuery(any(),any())).thenReturn(new + // QueryParams().withAttrs("brandName")); + // when(queryService.getData(any(), any(), any(), null, null, + // null)).thenReturn(result); + +// Mockito.doReturn(resolveQueryLdContext).when(paramsResolver).resolveQueryLdContext(any(), any()); +// 
Mockito.doReturn(new QueryParams().withAttrs("brandName")).when(paramsResolver) +// .getQueryParamsFromUriQuery(any(), any()); + Mockito.doReturn(result).when(queryService).getData(any(), any(), any(), any(), any(), any(), any()); +// Mockito.doReturn(responseEntity).when(qc).generateReply(any(), any()); + + mockMvc.perform(get("/ngsi-ld/v1/entities/?attrs=brandName").accept(AppConstants.NGB_APPLICATION_JSON)) + .andExpect(status().isOk()).andDo(print()); + // verify(HttpUtils.parseLinkHeader(any(HttpServletRequest.class), + // NGSIConstants.HEADER_REL_LDCONTEXT)); + // verify(paramsResolver,times(1)).resolveQueryLdContext(any(), any()); +// verify(paramsResolver, times(1)).getQueryParamsFromUriQuery(any(), any()); +// verify(paramsResolver, times(1)).getQueryParamsFromUriQuery(any(), any()); + verify(queryService, times(1)).getData(any(), any(), any(), any(), any(), any(), any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + +} \ No newline at end of file diff --git a/scorpio-broker/Core/SubscriptionManager/.gitignore b/scorpio-broker/Core/SubscriptionManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..1f31adb1dcec3b9e26bcfdd4ca40a9050e4840e7 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/.gitignore @@ -0,0 +1,5 @@ +/target/ +/logs/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/Core/SubscriptionManager/dockerfile4maven b/scorpio-broker/Core/SubscriptionManager/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/Core/SubscriptionManager/pom.xml b/scorpio-broker/Core/SubscriptionManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..52e483f1eca378b500a7bd0728b48c3e4fde9bc7 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/pom.xml @@ -0,0 +1,120 @@ + + 4.0.0 + SubscriptionManager + jar + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + 1.0.0-SNAPSHOT + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + com.google.guava + guava + 25.1-jre + + + + com.google.code.gson + gson + + + + org.locationtech.spatial4j + spatial4j + 0.7 + + + + org.locationtech.jts + jts-core + 1.15.1 + + + org.springframework.boot + spring-boot-starter-websocket + + + org.springframework.boot + spring-boot-starter-reactor-netty + + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-logging + + + + + + + + com.hivemq + hivemq-mqtt-client-shaded + 1.1.3 + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.postgresql + postgresql + + + + org.mapdb + mapdb + 3.0.8 + + + diff --git 
a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandler.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..d224bece7bae6bae6cc5f62e894c399f59e58dc6 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandler.java @@ -0,0 +1,65 @@ +package eu.neclab.ngsildbroker.subscriptionmanager; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.stream.annotation.EnableBinding; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; + +import eu.neclab.ngsildbroker.commons.ldcontext.AtContextProducerChannel; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; + +@SpringBootApplication +@EnableBinding({ AtContextProducerChannel.class }) +@Import({KafkaConfig.class, SwaggerConfigDetails.class}) +public class SubscriptionHandler { + + @Value("${atcontext.url}") + String atContextServerUrl; + + public static void main(String[] args) { + SpringApplication.run(SubscriptionHandler.class, args); + + } + + @Bean("smops") + KafkaOps ops() { + return new KafkaOps(); + } + + @Bean("smconRes") + ContextResolverBasic conRes() { + return new ContextResolverBasic(atContextServerUrl); + } + + @Bean("smsecurityConfig") + SecurityConfig securityConfig() { + return new SecurityConfig(); + } +// @Bean("smrestTemp") +// RestTemplate restTemp() { +// return new RestTemplate(); +// } +// + @Bean("smresourceConfigDetails") + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } + + @Bean("smparamsResolver") + ParamsResolver paramsResolver() { + return new ParamsResolver(); + } + @Bean("smqueryParser") + QueryParser queryParser() { + return new QueryParser(); + } +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/config/SubscriptionManagerResourceConfigurer.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/config/SubscriptionManagerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..02e4c790900f37df08cd99ec1a1afee1ce9826eb --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/config/SubscriptionManagerResourceConfigurer.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import 
org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. + */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class SubscriptionManagerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.java new file mode 100644 index 0000000000000000000000000000000000000000..e3023adb06794664c195026bed08397e44ea8d37 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.java @@ -0,0 +1,42 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.controller; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.interfaces.SubscriptionManager; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +@RestController +@RequestMapping("/remotenotify") +public class NotificationController { + + @Autowired + SubscriptionManager subscriptionManager; + + @Autowired + ContextResolverBasic resolver; + + @RequestMapping(method=RequestMethod.POST, value = "/{id}") + public void notify(HttpServletRequest req, @RequestBody String payload, @PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = false) String id) { + try { + subscriptionManager.remoteNotify(id, DataSerializer.getNotification(resolver.expand(payload, HttpUtils.getAtContext(req),true, AppConstants.SUBSCRIPTIONS_URL_ID))); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.java new file mode 
100644 index 0000000000000000000000000000000000000000..c0789b6f35e9a677a7d00bbbf77295fbbe5d7c78 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.java @@ -0,0 +1,207 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.controller; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; + +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.RestController; + +import com.netflix.discovery.EurekaClient; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.interfaces.SubscriptionManager; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +@RestController +@RequestMapping("/ngsi-ld/v1/subscriptions") +public class SubscriptionController { + + private final static Logger logger = LogManager.getLogger(SubscriptionController.class); + + @Autowired + SubscriptionManager manager; + + @Autowired + @Qualifier("smconRes") + ContextResolverBasic contextResolver; + + @Autowired + @Qualifier("smops") + KafkaOps kafkaOps; + + @Autowired + EurekaClient eurekaClient; + + @Value("${atcontext.url}") + String atContextServerUrl; + + @Autowired + @Qualifier("smqueryParser") + QueryParser queryParser; + + @Autowired + @Qualifier("smparamsResolver") + ParamsResolver ldTools; + + ResponseException badRequest = new ResponseException(ErrorType.BadRequestData); + + ResponseEntity badRequestResponse = ResponseEntity.status(badRequest.getHttpStatus()) + .body(new RestResponse(badRequest).toJsonBytes()); + + private HttpUtils httpUtils; + + @PostConstruct + private void setup() { + this.httpUtils = HttpUtils.getInstance(contextResolver); + } + + @RequestMapping(method = RequestMethod.POST) + @ResponseBody + public ResponseEntity subscribeRest(HttpServletRequest request, @RequestBody String payload) { + logger.trace("subscribeRest() :: started"); + Subscription subscription = null; + + try { + 
HttpUtils.doPreflightCheck(request, payload); + List context = HttpUtils.getAtContext(request); + // System.out.println("RECEIVING SUBSCRIPTION: " + payload + " at " + + // System.currentTimeMillis()); + subscription = contextResolver.expandSubscription(payload, context); + SubscriptionRequest subRequest = new SubscriptionRequest(subscription, context); + URI subId = manager.subscribe(subRequest); + + logger.trace("subscribeRest() :: completed"); + return ResponseEntity.created(new URI(AppConstants.SUBSCRIPTIONS_URL + subId.toString())).build(); + } catch (ResponseException e) { + logger.error("Exception ::", e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } catch (URISyntaxException e) { + logger.error("Exception ::", e); + return ResponseEntity.status(HttpStatus.CONFLICT).body(subscription.getId().toString().getBytes()); + } + } + + @RequestMapping(method = RequestMethod.GET, value = "/") + public ResponseEntity getAllSubscriptions(HttpServletRequest request, + @RequestParam(required = false, name = "limit", defaultValue = "0") int limit) throws ResponseException { + logger.trace("getAllSubscriptions() :: started"); + List result = null; + result = manager.getAllSubscriptions(limit); + logger.trace("getAllSubscriptions() :: completed"); + + return httpUtils.generateReply(request, DataSerializer.toJson(result)); + + } + + @RequestMapping(method = RequestMethod.GET, value = "/{id}") + public ResponseEntity getSubscriptions(HttpServletRequest request, + @PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = true) String id, + @RequestParam(required = false, name = "limit", defaultValue = "0") int limit) { + try { + logger.trace("call getSubscriptions() ::"); + return httpUtils.generateReply(request, DataSerializer.toJson(manager.getSubscription(id))); + + } catch (ResponseException e) { + logger.error("Exception ::", e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } + + } + + @RequestMapping(method = RequestMethod.DELETE, value = "/{id}") + public ResponseEntity deleteSubscription(HttpServletRequest request, + @PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = true) URI id) { + try { + logger.trace("call deleteSubscription() ::"); + // System.out.println("DELETING SUBSCRIPTION: " + id + " at " + + // System.currentTimeMillis()); + manager.unsubscribe(id); + } catch (ResponseException e) { + logger.error("Exception ::", e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } + return ResponseEntity.noContent().build(); + } + + @RequestMapping(method = RequestMethod.PATCH, value = "/{" + NGSIConstants.QUERY_PARAMETER_ID + "}") + public ResponseEntity updateSubscription(HttpServletRequest request, + @PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = true) URI id, + @RequestBody String payload) { + logger.trace("call updateSubscription() ::"); + + try { + HttpUtils.doPreflightCheck(request, payload); + List context = HttpUtils.getAtContext(request); + String resolved = contextResolver.expand(payload, context, true, AppConstants.SUBSCRIPTIONS_URL_ID); + Subscription subscription = DataSerializer.getSubscription(resolved); + if (subscription.getId() == null) { + subscription.setId(id); + } + SubscriptionRequest subscriptionRequest = new SubscriptionRequest(subscription, context); + + // expandSubscriptionAttributes(subscription, context); + if (resolved == null || subscription == null || 
!id.equals(subscription.getId())) { + return badRequestResponse; + } + manager.updateSubscription(subscriptionRequest); + } catch (ResponseException e) { + logger.error("Exception ::", e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } + return ResponseEntity.noContent().build(); + } + + // private void expandSubscriptionAttributes(Subscription subscription, + // List context) + // throws ResponseException { + // for (EntityInfo info : subscription.getEntities()) { + // if (info.getType() != null && !info.getType().trim().equals("")) { + // info.setType(ldTools.expandAttribute(info.getType(), context)); + // } + // } + // if (subscription.getAttributeNames() != null) { + // ArrayList newAttribNames = new ArrayList(); + // for (String attrib : subscription.getAttributeNames()) { + // newAttribNames.add(ldTools.expandAttribute(attrib, context)); + // } + // subscription.setAttributeNames(newAttribNames); + // } + // if (subscription.getNotification().getAttributeNames() != null) { + // ArrayList newAttribNames = new ArrayList(); + // for (String attrib : subscription.getNotification().getAttributeNames()) { + // newAttribNames.add(ldTools.expandAttribute(attrib, context)); + // } + // subscription.getNotification().setAttributeNames(newAttribNames); + // + // } + // + // } + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionWebsocketController.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionWebsocketController.java new file mode 100644 index 0000000000000000000000000000000000000000..e30a909af709e4a26e31fa7b9aba17fb063eb5ad --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionWebsocketController.java @@ -0,0 +1,28 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.controller; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.messaging.Message; +import org.springframework.messaging.handler.annotation.MessageMapping; +import org.springframework.messaging.handler.annotation.SendTo; +import org.springframework.messaging.simp.SimpMessagingTemplate; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Controller; + +@Controller +public class SubscriptionWebsocketController { + + + @Autowired + public SubscriptionWebsocketController() { + } + + @MessageMapping("/incoming") + @SendTo("/topic/outgoing") + public String incoming(Message message) { + System.out.println(message.getPayload()); + return "blaaaaa"; + } + + + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/BaseNotificationHandler.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/BaseNotificationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..aaa52fb8b78e711b3fd4953bdecbf42da24e3516 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/BaseNotificationHandler.java @@ -0,0 +1,597 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.security.Principal; +import 
java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Timer; +import java.util.TimerTask; + +import javax.servlet.AsyncContext; +import javax.servlet.DispatcherType; +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletContext; +import javax.servlet.ServletException; +import javax.servlet.ServletInputStream; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; +import javax.servlet.http.HttpUpgradeHandler; +import javax.servlet.http.Part; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.http.ResponseEntity.BodyBuilder; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.common.collect.ArrayListMultimap; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.interfaces.NotificationHandler; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.EntityTools; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +public abstract class BaseNotificationHandler implements NotificationHandler { + + + protected abstract void sendReply(ResponseEntity reply, URI callback, Map clientSettings) throws Exception; + private final Logger logger = LogManager.getLogger(this.getClass()); + private SubscriptionService subscriptionManagerService; + protected HttpUtils httpUtils; + private ObjectMapper objectMapper; + + public BaseNotificationHandler(SubscriptionService subscriptionManagerService, ContextResolverBasic contextResolver, + ObjectMapper objectMapper) { + this.subscriptionManagerService = subscriptionManagerService; + this.objectMapper = objectMapper; + httpUtils = HttpUtils.getInstance(contextResolver); + } + + HashMap subId2LastReport = new HashMap(); + ArrayListMultimap subId2Notifications = ArrayListMultimap.create(); + Timer executor = new Timer(true); + + @Override + public void notify(Notification notification, URI callback, String acceptHeader, String subId, List context, + int throttling, Map clientSettings) { + + ArrayList subIds = new ArrayList(); + subIds.add(subId); + + if (throttling > 0) { + synchronized (subId2Notifications) { + subId2Notifications.put(subId, notification); + Long lastReport = subId2LastReport.get(subId); + Long now = System.currentTimeMillis() / 1000; + if (lastReport == null) { + lastReport = 0l; + } + Long delay = 0l; + Long delta = now - lastReport; + if (delta < throttling) { + delay = delta; + } + executor.schedule(new TimerTask() { + + @Override + public void run() { + synchronized (subId2Notifications) { + Notification sendOutNotification = EntityTools + 
.squashNotifications(subId2Notifications.removeAll(subId)); + String jsonStr = DataSerializer.toJson(sendOutNotification); + Long now = System.currentTimeMillis(); + subId2LastReport.put(subId, now / 1000); + subscriptionManagerService.reportNotification(subId, now); + try { + logger.trace("Sending notification"); + logger.debug("Json to be sent: " + jsonStr); + ResponseEntity reply = generateNotificationResponse(acceptHeader, jsonStr, + context); + logger.debug("body to be sent: " + reply.getBody().toString()); + sendReply(reply, callback, clientSettings); + subscriptionManagerService.reportSuccessfulNotification(subId, now); + } catch (Exception e) { + logger.error("Exception ::", e); + subscriptionManagerService.reportFailedNotification(subId, now); + e.printStackTrace(); + } + } + + } + }, delay); + + } + + } else { + String jsonStr = DataSerializer.toJson(notification); + logger.debug("Sending notification"); + ResponseEntity reply; + long now = System.currentTimeMillis(); + try { + reply = generateNotificationResponse(acceptHeader, jsonStr, context); + logger.debug(new String(reply.getBody())); + sendReply(reply, callback, clientSettings); + subscriptionManagerService.reportNotification(subId, now); + } catch (Exception e) { + logger.error("Exception ::", e); + subscriptionManagerService.reportFailedNotification(subId, now); + e.printStackTrace(); + } + + } + + } + + private ResponseEntity generateNotificationResponse(String acceptHeader, String body, List context) + throws ResponseException { + HttpServletRequest request = new HttpServletRequest() { + + @Override + public Object getAttribute(String name) { + + return null; + } + + @Override + public Enumeration getAttributeNames() { + + return null; + } + + @Override + public String getCharacterEncoding() { + + return null; + } + + @Override + public void setCharacterEncoding(String env) throws UnsupportedEncodingException { + + } + + @Override + public int getContentLength() { + + return 0; + } + + @Override + public long getContentLengthLong() { + + return 0; + } + + @Override + public String getContentType() { + + return null; + } + + @Override + public ServletInputStream getInputStream() throws IOException { + + return null; + } + + @Override + public String getParameter(String name) { + + return null; + } + + @Override + public Enumeration getParameterNames() { + + return null; + } + + @Override + public String[] getParameterValues(String name) { + + return null; + } + + @Override + public Map getParameterMap() { + + return null; + } + + @Override + public String getProtocol() { + + return null; + } + + @Override + public String getScheme() { + + return null; + } + + @Override + public String getServerName() { + + return null; + } + + @Override + public int getServerPort() { + + return 0; + } + + @Override + public BufferedReader getReader() throws IOException { + + return null; + } + + @Override + public String getRemoteAddr() { + + return null; + } + + @Override + public String getRemoteHost() { + + return null; + } + + @Override + public void setAttribute(String name, Object o) { + + } + + @Override + public void removeAttribute(String name) { + + } + + @Override + public Locale getLocale() { + + return null; + } + + @Override + public Enumeration getLocales() { + + return null; + } + + @Override + public boolean isSecure() { + + return false; + } + + @Override + public RequestDispatcher getRequestDispatcher(String path) { + + return null; + } + + @Override + public String getRealPath(String path) { + + return null; + } + + 
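+ // The remaining methods of this synthetic HttpServletRequest are stubs; only getHeader/getHeaders for Accept (further below) carry real data for the generateReply() call at the end.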
@Override + public int getRemotePort() { + + return 0; + } + + @Override + public String getLocalName() { + + return null; + } + + @Override + public String getLocalAddr() { + + return null; + } + + @Override + public int getLocalPort() { + + return 0; + } + + @Override + public ServletContext getServletContext() { + + return null; + } + + @Override + public AsyncContext startAsync() throws IllegalStateException { + + return null; + } + + @Override + public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse) + throws IllegalStateException { + + return null; + } + + @Override + public boolean isAsyncStarted() { + + return false; + } + + @Override + public boolean isAsyncSupported() { + + return false; + } + + @Override + public AsyncContext getAsyncContext() { + + return null; + } + + @Override + public DispatcherType getDispatcherType() { + + return null; + } + + @Override + public String getAuthType() { + + return null; + } + + @Override + public Cookie[] getCookies() { + + return null; + } + + @Override + public long getDateHeader(String name) { + + return 0; + } + + @Override + public String getHeader(String name) { + if (HttpHeaders.ACCEPT.equals(name)) { + return acceptHeader; + } + return null; + } + + ArrayList acceptHeaderList = new ArrayList(); + { + acceptHeaderList.add(acceptHeader); + } + + @Override + public Enumeration getHeaders(String name) { + if (HttpHeaders.ACCEPT.equals(name)) { + return Collections.enumeration(acceptHeaderList); + } + return null; + } + + @Override + public Enumeration getHeaderNames() { + + return null; + } + + @Override + public int getIntHeader(String name) { + + return 0; + } + + @Override + public String getMethod() { + + return null; + } + + @Override + public String getPathInfo() { + + return null; + } + + @Override + public String getPathTranslated() { + + return null; + } + + @Override + public String getContextPath() { + + return null; + } + + @Override + public String getQueryString() { + + return null; + } + + @Override + public String getRemoteUser() { + + return null; + } + + @Override + public boolean isUserInRole(String role) { + + return false; + } + + @Override + public Principal getUserPrincipal() { + + return null; + } + + @Override + public String getRequestedSessionId() { + + return null; + } + + @Override + public String getRequestURI() { + + return null; + } + + @Override + public StringBuffer getRequestURL() { + + return null; + } + + @Override + public String getServletPath() { + + return null; + } + + @Override + public HttpSession getSession(boolean create) { + + return null; + } + + @Override + public HttpSession getSession() { + + return null; + } + + @Override + public String changeSessionId() { + + return null; + } + + @Override + public boolean isRequestedSessionIdValid() { + + return false; + } + + @Override + public boolean isRequestedSessionIdFromCookie() { + + return false; + } + + @Override + public boolean isRequestedSessionIdFromURL() { + + return false; + } + + @Override + public boolean isRequestedSessionIdFromUrl() { + + return false; + } + + @Override + public boolean authenticate(HttpServletResponse response) throws IOException, ServletException { + + return false; + } + + @Override + public void login(String username, String password) throws ServletException { + + } + + @Override + public void logout() throws ServletException { + + } + + @Override + public Collection getParts() throws IOException, ServletException { + + return null; + } + + @Override + public Part 
getPart(String name) throws IOException, ServletException { + + return null; + } + + @Override + public T upgrade(Class httpUpgradeHandlerClass) + throws IOException, ServletException { + + return null; + } + + }; + + ResponseEntity temp = httpUtils.generateReply(request, body, null, context); + JsonNode jsonTree; + try { + jsonTree = objectMapper.readTree(temp.getBody()); + if (jsonTree.get("data").isArray()) { + return temp; + } + ArrayNode dataArray = objectMapper.createArrayNode(); + + dataArray.add(jsonTree.get("data")); + ((ObjectNode) jsonTree).set("data", dataArray); + + BodyBuilder builder = ResponseEntity.status(HttpStatus.ACCEPTED); + + return builder.headers(temp.getHeaders()).body(objectMapper.writeValueAsBytes(jsonTree)); + + } catch (IOException e) { + // Left empty intentionally + } + return temp; + + } + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/IntervalNotificationHandler.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/IntervalNotificationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..8be9322f9c1d6d086c2f4bd72396d9a94a72a929 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/IntervalNotificationHandler.java @@ -0,0 +1,132 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import java.io.ByteArrayInputStream; +import java.io.DataInputStream; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.header.internals.RecordHeader; +import org.springframework.kafka.requestreply.ReplyingKafkaTemplate; +import org.springframework.kafka.requestreply.RequestReplyFuture; +import org.springframework.kafka.support.KafkaHeaders; + +import com.google.common.collect.ArrayListMultimap; + +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.interfaces.NotificationHandler; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.EntityTools; + +public class IntervalNotificationHandler { + + private ArrayListMultimap id2Data = ArrayListMultimap.create(); + private HashMap id2Callback = new HashMap(); + private HashMap id2TimerTask = new HashMap(); + private Timer executor = new Timer(true); + + String requestTopic; + String queryResultTopic; + ReplyingKafkaTemplate kafkaTemplate; + + private NotificationHandler notificationHandler; + private ParamsResolver resolver; + + public IntervalNotificationHandler(NotificationHandler notificationHandler, + ReplyingKafkaTemplate kafkaTemplate, String queryResultTopic, String requestTopic, + ParamsResolver resolver) { + this.requestTopic = requestTopic; + this.queryResultTopic = queryResultTopic; + this.kafkaTemplate = kafkaTemplate; + this.notificationHandler = notificationHandler; + 
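+ // resolver converts a Subscription into storage-manager QueryParams; MyTimer below serializes them once and re-queries the storage manager on every interval tick.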
this.resolver = resolver; + } + + public void addSub(SubscriptionRequest subscriptionRequest) { + MyTimer timer = new MyTimer(subscriptionRequest); + id2TimerTask.put(subscriptionRequest.getSubscription().getId().toString(), timer); + executor.schedule(timer, 0, subscriptionRequest.getSubscription().getTimeInterval() * 1000); + } + + public List getFromStorageManager(String storageManagerQuery) throws Exception { + // create producer record + // logger.trace("getFromStorageManager() :: started"); + ProducerRecord record = new ProducerRecord(requestTopic, + storageManagerQuery.getBytes()); + // set reply topic in header + record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, queryResultTopic.getBytes())); + RequestReplyFuture sendAndReceive = kafkaTemplate.sendAndReceive(record); + // get consumer record + ConsumerRecord consumerRecord = sendAndReceive.get(); + // read from byte array + ByteArrayInputStream bais = new ByteArrayInputStream(consumerRecord.value()); + DataInputStream in = new DataInputStream(bais); + List entityList = new ArrayList(); + while (in.available() > 0) { + entityList.add(in.readUTF()); + } + // return consumer value + // logger.trace("getFromStorageManager() :: completed"); + return entityList; + } + + public void removeSub(String subId) { + if (id2TimerTask.containsKey(subId)) { + id2TimerTask.get(subId).cancel(); + id2TimerTask.remove(subId); + } + + } + + private class MyTimer extends TimerTask { + + private SubscriptionRequest subscriptionRequest; + private Subscription subscription; + private ArrayList paramStrings; + + public MyTimer(SubscriptionRequest subscriptionRequest) { + this.subscriptionRequest = subscriptionRequest; + this.subscription = subscriptionRequest.getSubscription(); + List params = resolver.getQueryParamsFromSubscription(subscription); + this.paramStrings = new ArrayList(); + for (QueryParams param : params) { + paramStrings.add(DataSerializer.toJson(param)); + } + } + + public void run() { + + ArrayList entities = new ArrayList(); + try { + for (String param : paramStrings) { + for (String entityString : getFromStorageManager(param)) { + entities.add(DataSerializer.getEntity(entityString)); + } + } + Notification notification = new Notification(EntityTools.getRandomID("notification:"), + System.currentTimeMillis(), subscription.getId(), entities, null, null, 0, true); + notificationHandler.notify(notification, subscription.getNotification().getEndPoint().getUri(), + subscription.getNotification().getEndPoint().getAccept(), subscription.getId().toString(), + subscriptionRequest.getContext(), 0, null); + } catch (URISyntaxException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + }; + + } +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/NotificationHandlerMQTT.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/NotificationHandlerMQTT.java new file mode 100644 index 0000000000000000000000000000000000000000..9b893b4852be6a11d0fe6b1d5cfd99567126bb4f --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/NotificationHandlerMQTT.java @@ -0,0 +1,124 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import java.net.URI; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import 
java.util.Map.Entry; +import org.springframework.http.HttpHeaders; +import org.springframework.http.ResponseEntity; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hivemq.client.mqtt.MqttClient; +import com.hivemq.client.mqtt.datatypes.MqttQos; +import com.hivemq.client.mqtt.mqtt3.Mqtt3BlockingClient; +import com.hivemq.client.mqtt.mqtt3.Mqtt3Client; +import com.hivemq.client.mqtt.mqtt5.Mqtt5BlockingClient; +import com.hivemq.client.mqtt.mqtt5.Mqtt5Client; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; + +public class NotificationHandlerMQTT extends BaseNotificationHandler { + + private final String CLIENT_ID = "ScorpioMqttNotifier"; + private HashMap uri2client = new HashMap(); + + public NotificationHandlerMQTT(SubscriptionService subscriptionManagerService, ContextResolverBasic contextResolver, + ObjectMapper objectMapper) { + super(subscriptionManagerService, contextResolver, objectMapper); + + } + + @Override + protected void sendReply(ResponseEntity reply, URI callback, Map clientSettings) + throws Exception { + MqttClient client = getClient(callback, clientSettings); + String qosString = null; + if(clientSettings != null) { + qosString = clientSettings.get(NGSIConstants.MQTT_QOS); + + } else { + qosString = String.valueOf(NGSIConstants.DEFAULT_MQTT_QOS); + } + int qos = 1; + if (qosString != null) { + qos = Integer.parseInt(qosString); + } + byte[] payload = getPayload(reply); + if (client instanceof Mqtt3BlockingClient) { + Mqtt3BlockingClient client3 = (Mqtt3BlockingClient) client; + client3.publishWith().topic(callback.getPath().substring(1)).qos(MqttQos.fromCode(qos)) + .payload(payload).send(); + } else { + Mqtt5BlockingClient client5 = (Mqtt5BlockingClient) client; + client5.publishWith().topic(callback.getPath().substring(1)) + .contentType(reply.getHeaders().getFirst(HttpHeaders.CONTENT_TYPE)).qos(MqttQos.fromCode(qos)) + .payload(payload).send(); + } + } + + private byte[] getPayload(ResponseEntity reply) { + HttpHeaders headers = reply.getHeaders(); + Map metaData = new HashMap(); + StringBuilder result = new StringBuilder("{\""+NGSIConstants.METADATA+"\":{"); + for(Entry> entry: headers.entrySet()) { + result.append("\""); + result.append(entry.getKey()); + result.append("\":"); + if(entry.getValue().size() != 1) { + result.append("["); + for(String headerValue: entry.getValue()) { + result.append(headerValue + ","); + } + result.setCharAt(result.length() - 1, ']'); + }else { + result.append("\""); + result.append(entry.getValue().get(0)); + result.append("\""); + } + result.append(","); + } + result.setCharAt(result.length() - 1, '}'); + result.append(","); + result.append("\""); + result.append(NGSIConstants.BODY); + result.append("\":{"); + result.append(new String(reply.getBody())); + result.append("}"); + result.append("}"); + return result.toString().getBytes(); + } + + private MqttClient getClient(URI callback, Map clientSettings) { + URI baseURI = URI.create(callback.getScheme() + "://" + callback.getAuthority()); + MqttClient result = uri2client.get(baseURI); + if (result == null) { + String mqttVersion = null; + if(clientSettings != null) { + mqttVersion = clientSettings.get(NGSIConstants.MQTT_VERSION); + + } else { + mqttVersion = NGSIConstants.DEFAULT_MQTT_VERSION; + } + + int port = callback.getPort(); + if (port == -1) { + port = 1883; + } + if (mqttVersion == null || 
mqttVersion.equals(NGSIConstants.MQTT_VERSION_5)) { + result = Mqtt5Client.builder().identifier(CLIENT_ID).serverHost(callback.getHost()).serverPort(port) + .buildBlocking(); + ((Mqtt5BlockingClient) result).connect(); + } else if(mqttVersion.equals(NGSIConstants.MQTT_VERSION_3)) { + result = Mqtt3Client.builder().identifier(CLIENT_ID).serverHost(callback.getHost()).serverPort(port) + .buildBlocking(); + ((Mqtt3BlockingClient) result).connect(); + } + uri2client.put(baseURI, result); + + } + return result; + } + + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/NotificationHandlerREST.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/NotificationHandlerREST.java new file mode 100644 index 0000000000000000000000000000000000000000..7b06f8d738ade456574dcf498bc02ea2fe0aaf72 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/NotificationHandlerREST.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import java.net.URI; +import java.util.Map; + +import org.springframework.http.ResponseEntity; +import com.fasterxml.jackson.databind.ObjectMapper; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; + +public class NotificationHandlerREST extends BaseNotificationHandler { + + + + public NotificationHandlerREST(SubscriptionService subscriptionManagerService, ContextResolverBasic contextResolver, + ObjectMapper objectMapper) { + super(subscriptionManagerService, contextResolver, objectMapper); + + } + + @Override + protected void sendReply(ResponseEntity reply, URI callback, Map clientSettings) throws Exception { + httpUtils.doPost(callback, reply.getBody(), + reply.getHeaders().toSingleValueMap()); + + } + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionInfoDAO.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionInfoDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..55e39198c0cca9b8e47782be8008b0794abe60de --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionInfoDAO.java @@ -0,0 +1,31 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.springframework.stereotype.Repository; + +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository +public class SubscriptionInfoDAO extends StorageReaderDAO { + public Set getAllIds() { + List tempList = readerJdbcTemplate.queryForList("SELECT id FROM entity", String.class); + return new HashSet(tempList); + } + public Map getIds2Type() { + List> temp = readerJdbcTemplate.queryForList("SELECT id, type FROM entity"); + HashMap result = new HashMap(); + for(Map entry: temp) { + result.put(entry.get("id").toString(), entry.get("type").toString()); + } + return result; + } + public String getEntity(String entityId) { + List tempList = readerJdbcTemplate.queryForList("SELECT data FROM entity WHERE id='" + entityId + "'", String.class); + return tempList.get(0); + } +} diff --git 
a/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.java b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.java new file mode 100644 index 0000000000000000000000000000000000000000..e847c887765c6b1f819bcdaa8f75269589de4bc0 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.java @@ -0,0 +1,991 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_CONTAINS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_DISJOINT; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_EQUALS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_INTERSECTS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_NEAR; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_OVERLAPS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_WITHIN; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Timer; +import java.util.TimerTask; +import java.util.UUID; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.locationtech.spatial4j.SpatialPredicate; +import org.locationtech.spatial4j.context.jts.JtsSpatialContext; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeFactory.PolygonBuilder; +import org.locationtech.spatial4j.shape.jts.JtsShapeFactory; +import org.mapdb.DB; +import org.mapdb.DBMaker; +import org.mapdb.HTreeMap; +import org.mapdb.Serializer; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpHeaders; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.requestreply.ReplyingKafkaTemplate; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.messaging.Message; +import org.springframework.stereotype.Service; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.filosganga.geogson.model.Point; +import com.github.filosganga.geogson.model.Polygon; +import com.github.filosganga.geogson.model.positions.SinglePosition; +import com.google.common.collect.ArrayListMultimap; +import com.google.gson.JsonParseException; +import com.netflix.appinfo.InstanceInfo; +import com.netflix.discovery.EurekaClient; +import com.netflix.discovery.shared.Application; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.BaseProperty; +import eu.neclab.ngsildbroker.commons.datatypes.EndPoint; +import eu.neclab.ngsildbroker.commons.datatypes.Entity; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.GeoProperty; +import eu.neclab.ngsildbroker.commons.datatypes.GeoPropertyEntry; 
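+// NOTE: subscriptions are held in the in-memory maps below and mirrored to a MapDB file store (subscriptionstore.db by default) so they can be re-subscribed after a restart.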
+import eu.neclab.ngsildbroker.commons.datatypes.LDGeoQuery; +import eu.neclab.ngsildbroker.commons.datatypes.Notification; +import eu.neclab.ngsildbroker.commons.datatypes.NotificationParam; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.enums.Format; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.interfaces.NotificationHandler; +import eu.neclab.ngsildbroker.commons.interfaces.SubscriptionManager; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.EntityTools; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +@Service +public class SubscriptionService implements SubscriptionManager { +//TODO Change notification data generation so that always the changed value from kafka is definatly present in the notification not the DB version(that one could have been already updated) + + private final static Logger logger = LogManager.getLogger(SubscriptionService.class); + + private final int CREATE = 0; + private final int APPEND = 1; + private final int UPDATE = 2; + private final int DELETE = 3; + + private final String ALL_TYPES_TYPE = "()"; + + @Value("${atcontext.url}") + String atContextServerUrl; + + NotificationHandlerREST notificationHandlerREST; + IntervalNotificationHandler intervalHandlerREST; + + NotificationHandlerMQTT notificationHandlerMQTT; + IntervalNotificationHandler intervalHandlerMQTT; + + Timer watchDog = new Timer(true); + + // KafkaOps kafkaOps = new KafkaOps(); + + @Autowired + ObjectMapper objectMapper; + + @Autowired + @Qualifier("smconRes") + ContextResolverBasic contextResolverService; + + @Autowired + EurekaClient eurekaClient; + + @Autowired + @Qualifier("smqueryParser") + QueryParser queryParser; + + @Autowired + ReplyingKafkaTemplate kafkaTemplate; + + @Autowired + SubscriptionInfoDAO subscriptionInfoDAO; + + @Value("${query.topic}") + String requestTopic; + + @Value("${query.result.topic}") + String queryResultTopic; + + @Autowired + @Qualifier("smparamsResolver") + ParamsResolver paramsResolver; + + boolean directDB = true; + + JtsShapeFactory shapeFactory = JtsSpatialContext.GEO.getShapeFactory(); + + HashMap subscriptionId2Subscription = new HashMap(); + HashMap subId2TimerTask = new HashMap(); + ArrayListMultimap type2EntitiesSubscriptions = ArrayListMultimap.create(); + HashMap sub2CreationTime = new HashMap(); + ArrayListMultimap subscriptionId2Context = ArrayListMultimap.create(); + HashMap remoteNotifyCallbackId2InternalSub = new HashMap(); + @Value("${bootstrap.servers}") + String BOOTSTRAP_SERVERS; + + HttpUtils httpUtils; + + private Map ids2Type; + + private HTreeMap subscriptionStore; + @Value("${subscriptions.store:subscriptionstore.db}") + private String subscriptionStoreLocation; + + // @Value("${notification.port}") + // String REMOTE_NOTIFICATION_PORT; + + public SubscriptionService() { + } + + @PostConstruct + private void setup() { + this.ids2Type = subscriptionInfoDAO.getIds2Type(); + httpUtils = HttpUtils.getInstance(contextResolverService); + notificationHandlerREST = new 
NotificationHandlerREST(this, contextResolverService, objectMapper); + intervalHandlerREST = new IntervalNotificationHandler(notificationHandlerREST, kafkaTemplate, queryResultTopic, + requestTopic, paramsResolver); + notificationHandlerMQTT = new NotificationHandlerMQTT(this, contextResolverService, objectMapper); + intervalHandlerMQTT = new IntervalNotificationHandler(notificationHandlerMQTT, kafkaTemplate, queryResultTopic, + requestTopic, paramsResolver); + logger.trace("call loadStoredSubscriptions() ::"); + this.subscriptionStore = DBMaker.fileDB(this.subscriptionStoreLocation).closeOnJvmShutdown().checksumHeaderBypass().transactionEnable().make().hashMap("subscriptions", Serializer.STRING, Serializer.STRING).createOrOpen(); + loadStoredSubscriptions(); + + } + @PreDestroy + private void deconstructor() { + subscriptionStore.close(); + } + private void loadStoredSubscriptions() { + // TODO Auto-generated method stub + synchronized (this.subscriptionStore) { + for (Entry entry : subscriptionStore.entrySet()) { + try { + SubscriptionRequest subscription = DataSerializer.getSubscriptionRequest(entry.getValue()); + subscribe(subscription); + } catch (JsonParseException e) { + logger.error("Exception ::", e); + e.printStackTrace(); + continue; + } catch (ResponseException e) { + logger.error("Exception ::", e); + e.printStackTrace(); + continue; + } + } + + } + } + + @Override + public URI subscribe(SubscriptionRequest subscriptionRequest) throws ResponseException { + logger.debug("Subscribe got called " + subscriptionRequest.getSubscription().toString()); + Subscription subscription = subscriptionRequest.getSubscription(); + validateSub(subscription); + if (subscription.getId() == null) { + subscription.setId(generateUniqueSubId(subscription)); + } else { + if (this.subscriptionId2Subscription.containsKey(subscription.getId().toString())) { + throw new ResponseException(ErrorType.AlreadyExists); + } + } + + this.subscriptionId2Subscription.put(subscription.getId().toString(), subscription); + if (subscription.getLdQuery() != null && !subscription.getLdQuery().trim().equals("")) { + subscription + .setQueryTerm(queryParser.parseQuery(subscription.getLdQuery(), subscriptionRequest.getContext())); + } + String endpointProtocol = subscription.getNotification().getEndPoint().getUri().getScheme(); + if (subscription.getTimeInterval() > 0) { + if (endpointProtocol.equals("mqtt")) { + intervalHandlerMQTT.addSub(subscriptionRequest); + } else { + intervalHandlerREST.addSub(subscriptionRequest); + } + } else { + this.subscriptionId2Context.putAll(subscription.getId().toString(), subscriptionRequest.getContext()); + this.sub2CreationTime.put(subscription, System.currentTimeMillis()); + List entities = subscription.getEntities(); + if (entities == null || entities.isEmpty()) { + this.type2EntitiesSubscriptions.put(ALL_TYPES_TYPE, subscription); + } else { + for (EntityInfo info : subscription.getEntities()) { + this.type2EntitiesSubscriptions.put(info.getType(), subscription); + + } + + } + storeSubscription(subscriptionRequest); + + if (subscription.getExpires() != null) { + TimerTask cancel = new TimerTask() { + + @Override + public void run() { + try { + unsubscribe(subscription.getId()); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + }; + subId2TimerTask.put(subscription.getId().toString(), cancel); + watchDog.schedule(cancel, subscription.getExpires() - System.currentTimeMillis()); + } + } + return subscription.getId(); + } + + private 
void validateSub(Subscription subscription) throws ResponseException { + if (subscription.getThrottling() > 0 && subscription.getTimeInterval() > 0) { + throw new ResponseException(ErrorType.BadRequestData, "throttling and timeInterval cannot both be set"); + } + if (subscription.getTimeInterval() > 0) { + if (subscription.getAttributeNames() == null || subscription.getAttributeNames().isEmpty()) { + return; + } + throw new ResponseException(ErrorType.BadRequestData, + "watchedAttributes and timeInterval cannot both be set"); + } + + } + + private void storeSubscription(SubscriptionRequest subscription) throws ResponseException { + new Thread() { + public void run() { + synchronized (subscriptionStore) { + subscriptionStore.put(subscription.getSubscription().getId().toString(), DataSerializer.toJson(subscription)); + } + }; + }.start(); + + } + + private URI generateUniqueSubId(Subscription subscription) { + + try { + return new URI("urn:ngsi-ld:Subscription:" + subscription.hashCode()); + } catch (URISyntaxException e) { + // Left empty intentionally should never happen + throw new AssertionError(); + } + } + + @Override + public void unsubscribe(URI id) throws ResponseException { + Subscription removedSub; + synchronized (subscriptionId2Subscription) { + removedSub = this.subscriptionId2Subscription.remove(id.toString()); + } + + if (removedSub == null) { + throw new ResponseException(ErrorType.NotFound); + } + synchronized (subscriptionId2Context) { + this.subscriptionId2Context.removeAll(id.toString()); + } + intervalHandlerREST.removeSub(id.toString()); + intervalHandlerMQTT.removeSub(id.toString()); + List entities = removedSub.getEntities(); + if (entities == null || entities.isEmpty()) { + synchronized (type2EntitiesSubscriptions) { + type2EntitiesSubscriptions.remove(ALL_TYPES_TYPE, removedSub); + } + } else { + for (EntityInfo info : entities) { + synchronized (type2EntitiesSubscriptions) { + type2EntitiesSubscriptions.remove(info.getType(), removedSub); + } + } + } + TimerTask task = subId2TimerTask.get(id.toString()); + if (task != null) { + task.cancel(); + } + // TODO remove remote subscription + new Thread() { + public void run() { + synchronized (subscriptionStore) { + subscriptionStore.remove(id.toString()); + } + }; + }.start(); + + } + + @Override + public Subscription updateSubscription(SubscriptionRequest subscriptionRequest) throws ResponseException { + Subscription subscription = subscriptionRequest.getSubscription(); + Subscription oldSub = subscriptionId2Subscription.get(subscription.getId().toString()); + + if (oldSub == null) { + throw new ResponseException(ErrorType.NotFound); + } + if (subscription.getAttributeNames() != null) { + oldSub.setAttributeNames(subscription.getAttributeNames()); + } + if (subscription.getDescription() != null) { + oldSub.setDescription(subscription.getDescription()); + } + if (subscription.getEntities() != null && !subscription.getEntities().isEmpty()) { + oldSub.setEntities(subscription.getEntities()); + } + if (subscription.getExpires() != null) { + oldSub.setExpires(subscription.getExpires()); + synchronized (subId2TimerTask) { + TimerTask task = subId2TimerTask.get(oldSub.getId().toString()); + task.cancel(); + watchDog.schedule(task, subscription.getExpires() - System.currentTimeMillis()); + } + + } + if (subscription.getLdGeoQuery() != null) { + oldSub.setLdGeoQuery(subscription.getLdGeoQuery()); + } + if (subscription.getLdQuery() != null) { + oldSub.setLdQuery(subscription.getLdQuery()); + } + if 
(subscription.getLdTempQuery() != null) { + oldSub.setLdTempQuery(subscription.getLdTempQuery()); + } + if (subscription.getNotification() != null) { + oldSub.setNotification(subscription.getNotification()); + } + if (subscription.getThrottling() != 0) { + oldSub.setThrottling(subscription.getThrottling()); + } + if (subscription.getTimeInterval() != 0) { + oldSub.setTimeInterval(subscription.getTimeInterval()); + } + synchronized (this.subscriptionId2Context) { + this.subscriptionId2Context.putAll(oldSub.getId().toString(), subscriptionRequest.getContext()); + } + return oldSub; + } + + @Override + public List getAllSubscriptions(int limit) { + List result = new ArrayList(); + result.addAll(subscriptionId2Subscription.values()); + if (limit > 0) { + if (limit < result.size()) { + result = result.subList(0, limit); + } + } + return result; + } + + @Override + public Subscription getSubscription(String subscriptionId) throws ResponseException { + if (subscriptionId2Subscription.containsKey(subscriptionId)) { + return subscriptionId2Subscription.get(subscriptionId); + } else { + throw new ResponseException(ErrorType.NotFound); + } + + } + + @KafkaListener(topics = "${entity.create.topic}", groupId = "submanager") + public void handleCreate(Message message) { + String payload = new String(message.getPayload()); + String key = KafkaOps.getMessageKey(message); + logger.debug("Create got called: " + payload); + logger.debug(key); + checkSubscriptionsWithCreate(key, payload, (long) message.getHeaders().get(KafkaHeaders.RECEIVED_TIMESTAMP)); + } + + private void checkSubscriptionsWithCreate(String key, String payload, long messageTime) { + Entity create = DataSerializer.getEntity(payload); + synchronized (this.ids2Type) { + this.ids2Type.put(key, create.getType()); + } + + ArrayList subsToCheck = new ArrayList(); + for (Subscription sub : this.type2EntitiesSubscriptions.get(create.getType())) { + for (EntityInfo entityInfo : sub.getEntities()) { + if (entityInfo.getId() == null && entityInfo.getIdPattern() == null) { + subsToCheck.add(sub); + break; + } + if (entityInfo.getId() != null && entityInfo.getId().toString().equals(key)) { + subsToCheck.add(sub); + break; + } + if (entityInfo.getIdPattern() != null && key.matches(entityInfo.getIdPattern())) { + subsToCheck.add(sub); + break; + } + } + } + subsToCheck.addAll(this.type2EntitiesSubscriptions.get(ALL_TYPES_TYPE)); + checkSubscriptions(subsToCheck, create, CREATE, messageTime); + + } + + private void checkSubscriptions(ArrayList subsToCheck, Entity entity, int methodType, + long messageTime) { + + for (Subscription subscription : subsToCheck) { + if (messageTime >= sub2CreationTime.get(subscription)) { + new Thread() { + public void run() { + Entity data = null; + try { + switch (methodType) { + case CREATE: + data = generateNotificationEntity(entity, subscription); + + break; + case APPEND: + data = generateDataFromBaseOp(entity, subscription); + break; + case UPDATE: + data = generateDataFromBaseOp(entity, subscription); + break; + case DELETE: + + break; + + default: + break; + } + + if (data != null) { + ArrayList dataList = new ArrayList(); + dataList.add(data); + sendNotification(dataList, subscription); + } + } catch (ResponseException e) { + logger.error("Failed to handle new data for the subscriptions, cause: " + e.getMessage()); + } + } + + }.start(); + } + + } + + } + + private void sendNotification(List dataList, Subscription subscription) { + logger.debug(DataSerializer.toJson(dataList)); + // System.out.println("SENDING 
NOTIFICATION: " + DataSerializer.toJson(dataList) + // + " \nTO SUBSCRIPTION \n" + // + DataSerializer.toJson(subscription)); + // if (subscription.getTimeInterval() > 0) { + // try { + // intervalHandler.notify(new + // Notification(EntityTools.getRandomID("notification:"), + // System.currentTimeMillis(), subscription.getId(), dataList, null, null, 0, + // true), + // subscription.getId().toString()); + // } catch (URISyntaxException e) { + // logger.error("Exception ::", e); + // // Left empty intentionally + // throw new AssertionError(); + // } + // } else { + try { + String endpointProtocol = subscription.getNotification().getEndPoint().getUri().getScheme(); + + NotificationHandler handler; + if (endpointProtocol.equals("mqtt")) { + handler = notificationHandlerMQTT; + } else { + handler = notificationHandlerREST; + } + handler.notify( + new Notification(EntityTools.getRandomID("notification:"), System.currentTimeMillis(), + subscription.getId(), dataList, null, null, 0, true), + subscription.getNotification().getEndPoint().getUri(), + subscription.getNotification().getEndPoint().getAccept(), subscription.getId().toString(), + subscriptionId2Context.get(subscription.getId().toString()), subscription.getThrottling(), + subscription.getNotification().getEndPoint().getNotifierInfo()); + } catch (URISyntaxException e) { + logger.error("Exception ::", e); + // Left empty intentionally + throw new AssertionError(); + } + // } + } + + private Entity generateNotificationEntity(Entity entity, Subscription subscription) throws ResponseException { + + if (!evaluateGeoQuery(subscription.getLdGeoQuery(), entity.getLocation())) { + return null; + } + if (subscription.getQueryTerm() != null) { + if (!subscription.getQueryTerm().calculate(entity.getAllBaseProperties())) { + return null; + } + } + List baseProps = extractBaseProps(entity, subscription); + + if (baseProps.isEmpty()) { + return null; + } + Entity result = new Entity(entity.getId(), entity.getType(), baseProps, entity.getRefToAccessControl()); + + return result; + } + + private List extractBaseProps(Entity entity, Subscription subscription) { + ArrayList result = new ArrayList(); + if (!shouldFire(entity, subscription)) { + return result; + } + ArrayList attribNames = getAttribNames(subscription); + if (attribNames.isEmpty()) { + return entity.getAllBaseProperties(); + } + + for (BaseProperty property : entity.getAllBaseProperties()) { + if (attribNames.contains(property.getIdString())) { + result.add(property); + } + } + return result; + } + + private boolean shouldFire(Entity entity, Subscription subscription) { + if (subscription.getAttributeNames() == null || subscription.getAttributeNames().isEmpty()) { + return true; + } + for (String attribName : subscription.getAttributeNames()) { + for (BaseProperty baseProp : entity.getAllBaseProperties()) { + if (attribName.equals(baseProp.getIdString())) { + return true; + } + } + } + return false; + } + + private Entity generateDataFromBaseOp(Entity deltaInfo, Subscription subscription) throws ResponseException { + String entityBody = null; + if (!shouldFire(deltaInfo, subscription)) { + return null; + } + + if (directDB) { + entityBody = subscriptionInfoDAO.getEntity(deltaInfo.getId().toString()); + } + // HERE YOU NEED TO REPLACE THE ATTRIBUTE TO THE ONE FROM DELTA + Entity entity = DataSerializer.getEntity(entityBody); + if (!evaluateGeoQuery(subscription.getLdGeoQuery(), entity.getLocation())) { + return null; + } + if (subscription.getQueryTerm() != null) { + if 
(!subscription.getQueryTerm().calculate(entity.getAllBaseProperties())) { + return null; + } + } + + List baseProps = extractBaseProps(entity, subscription); + if (baseProps.isEmpty()) { + return null; + } + Entity temp = new Entity(deltaInfo.getId(), entity.getType(), baseProps, entity.getRefToAccessControl()); + + return temp; + } + + private ArrayList getAttribNames(Subscription subscription) { + ArrayList attribNames = new ArrayList(); + if (subscription.getNotification().getAttributeNames() != null) { + attribNames.addAll(subscription.getNotification().getAttributeNames()); + } + // if (subscription.getAttributeNames() != null) { + // attribNames.addAll(subscription.getAttributeNames()); + // } + return attribNames; + } + + private boolean evaluateGeoQuery(LDGeoQuery geoQuery, GeoProperty location) { + return evaluateGeoQuery(geoQuery, location, -1); + } + + private boolean evaluateGeoQuery(LDGeoQuery geoQuery, GeoProperty location, double expandArea) { + + if (geoQuery == null) { + return true; + } + + String relation = geoQuery.getGeoRelation().getRelation(); + List coordinates = geoQuery.getCoordinates(); + + if (location == null) { + return false; + } + Iterator it = location.getEntries().values().iterator(); + while (it.hasNext()) { + GeoPropertyEntry next = it.next(); + if (GEO_REL_EQUALS.equals(relation)) { + if (next.getGeoValue() instanceof Point) { + List geoValueAsList = java.util.Arrays.asList(((Point) next.getGeoValue()).lon(), + ((Point) next.getGeoValue()).lat()); + + return geoValueAsList.equals(geoQuery.getCoordinates()); + } else { + // TODO + + return false; + } + } else { + + Shape entityShape; + if (next.getGeoValue() instanceof Point) { + entityShape = shapeFactory.pointXY(((Point) next.getGeoValue()).lon(), + ((Point) next.getGeoValue()).lat()); + } else if (next.getGeoValue() instanceof Polygon) { + PolygonBuilder polygonBuilder = shapeFactory.polygon(); + Iterator it2 = ((Polygon) next.getGeoValue()).positions().children().iterator() + .next().children().iterator(); + while (it2.hasNext()) { + polygonBuilder.pointXY(((SinglePosition) it2).coordinates().getLon(), + ((SinglePosition) it2).coordinates().getLat()); + } + entityShape = polygonBuilder.build(); + } else { + logger.error("Unsupported GeoJson type. 
Currently Point and Polygon are supported."); + return false; + } + Shape queryShape; + switch (geoQuery.getGeometry()) { + case Point: { + queryShape = shapeFactory.pointXY(coordinates.get(0), coordinates.get(1)); + break; + } + case Polygon: { + PolygonBuilder polygonBuilder = shapeFactory.polygon(); + for (int i = 0; i < coordinates.size(); i = i + 2) { + polygonBuilder.pointXY(coordinates.get(i), coordinates.get(i + 1)); + } + + queryShape = polygonBuilder.build(); + break; + } + default: { + return false; + } + } + if (GEO_REL_CONTAINS.equals(relation)) { + return SpatialPredicate.Contains.evaluate(entityShape, queryShape); + } else if (GEO_REL_DISJOINT.equals(relation)) { + return SpatialPredicate.IsDisjointTo.evaluate(entityShape, queryShape); + } else if (GEO_REL_INTERSECTS.equals(relation)) { + if (expandArea != -1) { + queryShape = queryShape.getBuffered(expandArea, queryShape.getContext()); + } + return SpatialPredicate.Intersects.evaluate(entityShape, queryShape); + } else if (GEO_REL_NEAR.equals(relation)) { + if (geoQuery.getGeoRelation().getMaxDistance() != null) { + Shape bufferedShape = queryShape.getBuffered(geoQuery.getGeoRelation().getMaxDistanceAsDouble(), + queryShape.getContext()); + return SpatialPredicate.IsWithin.evaluate(entityShape, bufferedShape); + } else if (geoQuery.getGeoRelation().getMinDistance() != null) { + Shape bufferedShape = queryShape.getBuffered(geoQuery.getGeoRelation().getMinDistanceAsDouble(), + queryShape.getContext()); + return !SpatialPredicate.IsWithin.evaluate(entityShape, bufferedShape); + } else { + return false; + } + + } else if (GEO_REL_OVERLAPS.equals(relation)) { + return SpatialPredicate.Overlaps.evaluate(entityShape, queryShape); + } else if (GEO_REL_WITHIN.equals(relation)) { + if (expandArea != -1) { + queryShape = queryShape.getBuffered(expandArea, queryShape.getContext()); + } + return SpatialPredicate.IsWithin.evaluate(entityShape, queryShape); + } else { + return false; + } + } + + } + return false; + } + + // private Property getPropertyByName(String name, List properties) { + // for (Property property : properties) { + // if (property.getName().equals(name)) { + // return property; + // } + // } + // return null; + // } + + @KafkaListener(topics = "${entity.update.topic}", groupId = "submanager") + public void handleUpdate(Message message) { + String payload = new String(message.getPayload()); + String key = KafkaOps.getMessageKey(message); + logger.debug("update got called: " + payload); + logger.debug(key); + checkSubscriptionsWithUpdate(key, payload, (long) message.getHeaders().get(KafkaHeaders.RECEIVED_TIMESTAMP)); + } + + private void checkSubscriptionsWithUpdate(String key, String payload, long messageTime) { + Entity update = DataSerializer.getPartialEntity(payload); + String type = getTypeForId(key); + try { + update.setId(new URI(key)); + } catch (URISyntaxException e) { + // left empty intentionally should never happen because the uri should be + // already checked + e.printStackTrace(); + } + update.setType(type); + ArrayList subsToCheck = new ArrayList(); + for (Subscription sub : this.type2EntitiesSubscriptions.get(type)) { + for (EntityInfo entityInfo : sub.getEntities()) { + if (entityInfo.getId() == null && entityInfo.getIdPattern() == null) { + subsToCheck.add(sub); + break; + } + if (entityInfo.getId() != null && entityInfo.getId().toString().equals(key)) { + subsToCheck.add(sub); + break; + } + if (entityInfo.getIdPattern() != null && key.matches(entityInfo.getIdPattern())) { + subsToCheck.add(sub); + 
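+ // idPattern matched this entity id, so the subscription is a candidate; stop inspecting further EntityInfo entries.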
break; + } + } + } + subsToCheck.addAll(this.type2EntitiesSubscriptions.get(ALL_TYPES_TYPE)); + checkSubscriptions(subsToCheck, update, UPDATE, messageTime); + + } + + @KafkaListener(topics = "${entity.append.topic}", groupId = "submanager") + public void handleAppend(Message message) { + String payload = new String(message.getPayload()); + String key = KafkaOps.getMessageKey(message); + logger.debug("Create got called: " + payload); + logger.debug(key); + checkSubscriptionsWithAppend(key, payload, (long) message.getHeaders().get(KafkaHeaders.RECEIVED_TIMESTAMP)); + } + + private void checkSubscriptionsWithAppend(String key, String payload, long messageTime) { + Entity append = DataSerializer.getPartialEntity(payload); + String type = getTypeForId(key); + try { + append.setId(new URI(key)); + } catch (URISyntaxException e) { + // left empty intentionally should never happen because the uri should be + // already checked + e.printStackTrace(); + } + append.setType(type); + ArrayList subsToCheck = new ArrayList(); + for (Subscription sub : this.type2EntitiesSubscriptions.get(type)) { + for (EntityInfo entityInfo : sub.getEntities()) { + if (entityInfo.getId() == null && entityInfo.getIdPattern() == null) { + subsToCheck.add(sub); + break; + } + if (entityInfo.getId() != null && entityInfo.getId().toString().equals(key)) { + subsToCheck.add(sub); + break; + } + if (entityInfo.getIdPattern() != null && key.matches(entityInfo.getIdPattern())) { + subsToCheck.add(sub); + break; + } + } + } + subsToCheck.addAll(this.type2EntitiesSubscriptions.get(ALL_TYPES_TYPE)); + checkSubscriptions(subsToCheck, append, APPEND, messageTime); + + } + + // @StreamListener(SubscriptionManagerConsumerChannel.deleteReadChannel) + @KafkaListener(topics = "${entity.delete.topic}", groupId = "submanager") + public void handleDelete(Message message) throws Exception { + this.ids2Type.remove(KafkaOps.getMessageKey(message)); + // checkSubscriptionsWithDelete(new String((byte[]) + // message.getHeaders().get(KafkaHeaders.RECEIVED_MESSAGE_KEY)), + // new String(message.getPayload())); + } + + @KafkaListener(topics = "${csource.notification.topic}", groupId = "submanager") + public void handleCSourceNotification(Message message) { + String payload = new String(message.getPayload()); + String key = KafkaOps.getMessageKey(message); + @SuppressWarnings("unchecked") + ArrayList endPoints = DataSerializer.getStringList(payload); + subscribeToRemote(subscriptionId2Subscription.get(key), endPoints); + } + + // @KafkaListener(topics = "${csource.registry.topic}", groupId = "submanager") + // public void handleCSourceRegistry(Message message) throws Exception { + // CSourceRegistration csourceRegistration = objectMapper.readValue((byte[]) + // message.getPayload(), + // CSourceRegistration.class); + // checkSubscriptionsWithCSource(csourceRegistration); + // } + + private void subscribeToRemote(Subscription sub, ArrayList remoteEndPoints) { + new Thread() { + @Override + public void run() { + + Subscription remoteSub = new Subscription(); + remoteSub.setCustomFlags(sub.getCustomFlags()); + remoteSub.setDescription(sub.getDescription()); + remoteSub.setEntities(sub.getEntities()); + remoteSub.setExpires(sub.getExpires()); + remoteSub.setLdGeoQuery(sub.getLdGeoQuery()); + remoteSub.setLdQuery(sub.getLdQuery()); + remoteSub.setLdTempQuery(sub.getLdTempQuery()); + remoteSub.setName(sub.getName()); + remoteSub.setStatus(sub.getStatus()); + remoteSub.setThrottling(sub.getThrottling()); + remoteSub.setTimeInterval(sub.getTimeInterval()); + 
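+ // Copy the remaining subscription fields, then point the remote subscription's endpoint at this broker's /remotenotify callback so notifications are routed back here.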
remoteSub.setType(sub.getType()); + NotificationParam remoteNotification = new NotificationParam(); + remoteNotification.setAttributeNames(sub.getNotification().getAttributeNames()); + remoteNotification.setFormat(Format.normalized); + EndPoint endPoint = new EndPoint(); + endPoint.setAccept(AppConstants.NGB_APPLICATION_JSONLD); + endPoint.setUri(prepareNotificationServlet(sub)); + remoteNotification.setEndPoint(endPoint); + remoteSub.setAttributeNames(sub.getAttributeNames()); + String body = DataSerializer.toJson(remoteSub); + HashMap additionalHeaders = new HashMap(); + additionalHeaders.put(HttpHeaders.ACCEPT, AppConstants.NGB_APPLICATION_JSONLD); + for (String remoteEndPoint : remoteEndPoints) { + try { + httpUtils.doPost(new URI(remoteEndPoint), body, additionalHeaders); + } catch (IOException e) { + // TODO what to do when a remote sub times out ? at the moment we just fail here + e.printStackTrace(); + } catch (URISyntaxException e) { + + e.printStackTrace(); + } + } + + } + }.start(); + + } + + private URI prepareNotificationServlet(Subscription subToCheck) { + Application application = eurekaClient.getApplication("gateway"); + InstanceInfo instanceInfo = application.getInstances().get(0); + // TODO : search for a better way to resolve http or https + String hostIP = instanceInfo.getIPAddr(); + String uuid = Long.toString(UUID.randomUUID().getLeastSignificantBits()); + int port = instanceInfo.getPort(); + + remoteNotifyCallbackId2InternalSub.put(uuid, subToCheck); + StringBuilder url = new StringBuilder("http://").append(hostIP).append(":").append(port) + .append("/remotenotify/").append(uuid); + // System.out.println("URL : "+url.toString()); + try { + return new URI(url.toString()); + } catch (URISyntaxException e) { + logger.error("Exception ::", e); + // should never happen + return null; + } + + } + + @SuppressWarnings("unused") + // Kept for now ... 
Delete notifications are not present + private void checkSubscriptionsWithDelete(String key, String payload, long messageTime) { + Entity delete = DataSerializer.getEntity(payload); + ArrayList subsToCheck = new ArrayList(); + /* + * subsToCheck.addAll(this.idBasedSubscriptions.get(key)); + * subsToCheck.addAll(this.typeBasedSubscriptions.get(delete.getType())); + */ + checkSubscriptions(subsToCheck, delete, DELETE, messageTime); + + } + + private String getTypeForId(String key) { + synchronized (this.ids2Type) { + return (String) this.ids2Type.get(key); + } + /* + * //this has to be db handled byte[] json = kafkaOps.getMessage(key, + * KafkaConstants.ENTITY_TOPIC); if (json == null) { return ""; } try { return + * objectMapper.readTree(json).get(JSON_LD_TYPE).get(0).asText(""); } catch + * (IOException e) { logger.error("Exception ::", e); e.printStackTrace(); } + * return ""; + */ + } + + @Override + public void remoteNotify(String id, Notification notification) { + new Thread() { + @Override + public void run() { + Subscription subscription = remoteNotifyCallbackId2InternalSub.get(id); + sendNotification(notification.getData(), subscription); + } + }.start(); + + } + + public void reportNotification(String subId, Long now) { + synchronized (subscriptionId2Subscription) { + Subscription subscription = subscriptionId2Subscription.get(subId); + if (subscription != null) { + subscription.getNotification().setLastNotification(new Date(now)); + subscription.getNotification().setLastSuccessfulNotification(new Date(now)); + } + } + } + + public void reportFailedNotification(String subId, Long now) { + synchronized (subscriptionId2Subscription) { + Subscription subscription = subscriptionId2Subscription.get(subId); + if (subscription != null) { + subscription.getNotification().setLastFailedNotification(new Date(now)); + } + } + } + + public void reportSuccessfulNotification(String subId, Long now) { + synchronized (subscriptionId2Subscription) { + Subscription subscription = subscriptionId2Subscription.get(subId); + if (subscription != null) { + subscription.getNotification().setLastSuccessfulNotification(new Date(now)); + } + } + } + +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-aaio.yml b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..97f02ab6ca5b7e3f32d94c69a936ea1dc57f0b5d --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-aaio.yml @@ -0,0 +1,23 @@ +server: + port: 2025 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +bootstrap: + servers: kafka:9092 + + + +spring: + kafka: + consumer: + bootstrap-servers: kafka:9092 + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-aio.yml b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..bed38cd18b0c21b1f743053e83e1cd375a237154 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-aio.yml @@ -0,0 +1,23 @@ +server: + port: 2025 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +bootstrap: + servers: localhost:9092 + + + +spring: + kafka: + consumer: + bootstrap-servers: localhost:9092 + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + diff --git 
a/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-dist.yml b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..d505fbd8f01c872d5bf010e89e5c764304d4f179 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application-dist.yml @@ -0,0 +1,60 @@ +server: + port: 2025 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ + +bootstrap: + servers: kafka:9092 + + + +spring: + kafka: + consumer: + bootstrap-servers: kafka:9092 + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true +reader: + enabled: true + datasource: + url: "jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 + diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/resources/application.yml b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..2340ad7143d0f0a93fc34a307f54c334dfd5c645 --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/resources/application.yml @@ -0,0 +1,103 @@ +server: + port: 2025 + + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ + +#Entity-Manager topics properties +entity: + topic: ENTITY + create: + topic: ENTITY_CREATE + append: + topic: ENTITY_APPEND + update: + topic: ENTITY_UPDATE + delete: + topic: ENTITY_DELETE + +csource: + registry: + topic: CSOURCE_REGISTRATION + notification: + topic: CONTEXT_SOURCE_NOTIFICATION +query: + topic: QUERY + result: + topic: QUERY_RESULT +#kafka broker path +bootstrap: + servers: localhost:9092 + +key: + deserializer: org.apache.kafka.common.serialization.StringDeserializer +#Entity Append ovewrite options +append: + overwrite: noOverwrite +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + + +spring: + kafka: + admin: + properties: + cleanup: + policy: compact + main: + lazy-initialization: true + application: + name: subscription-manager + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + SUBSCRIPTIONS_WRITE_CHANNEL: + destination: SUBSCRIPTIONS + contentType: application/json + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP + password: ngb + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb +writer: + datasource: + hikari: + connectionTimeout: 30000 + idleTimeout: 30000 + maxLifetime: 2000000 + maximumPoolSize: 20 + minimumIdle: 5 + poolName: SpringBootHikariCP_Writer + password: ngb + url: 
jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + enabled: true +reader: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/Core/SubscriptionManager/src/main/resources/log4j2-spring.xml b/scorpio-broker/Core/SubscriptionManager/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..248a59a278ee881a53176deb81364a99141a266f --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandlerTest.java b/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandlerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..29d5b864753b5c63ec631423f99eab1a57133edb --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandlerTest.java @@ -0,0 +1,13 @@ +package eu.neclab.ngsildbroker.subscriptionmanager; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; +@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +public class SubscriptionHandlerTest { + @Test + public void contextLoads() { + } +} \ No newline at end of file diff --git a/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.java b/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..6c41b4009996f86494f340e6e81fa5a1195e81bc --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.java @@ -0,0 +1,260 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.controller; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; 
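// The SubscriptionService behind the controller is replaced with a @MockBean below, so
// these tests drive the REST layer through MockMvc and stub the service to produce the
// created, conflict, bad-request and not-found outcomes asserted in the test methods.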
+import org.mockito.MockitoAnnotations; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.subscriptionmanager.service.SubscriptionService; + +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +@RunWith(SpringRunner.class) +@AutoConfigureMockMvc//(secure = false) +public class SubscriptionControllerTest { + @Autowired + private MockMvc mockMvc; + + @MockBean + private SubscriptionService subscriptionService; + + + @InjectMocks + private SubscriptionController subscriptionController; + + + @Autowired + ContextResolverBasic contextResolver; + + @Autowired + ParamsResolver paramsResolver; + + private String subscriptionEntityPayload; + + + @Before + public void setup() throws Exception { + MockitoAnnotations.initMocks(this); + + //@formatter:off + + subscriptionEntityPayload="{" + + "\r\n\"id\": \"urn:ngsi-ld:Subscription:211\"," + + "\r\n\"type\": \"Subscription\"," + + "\r\n\"entities\": [{" + + "\r\n \"id\": \"urn:ngsi-ld:Vehicle:A143\"," + + "\r\n \"type\": \"Vehicle\"" + + "\r\n }]," + + "\r\n\"watchedAttributes\": [\"brandName\"]," + + "\r\n \"q\":\"brandName!=Mercedes\"," + + "\r\n\"notification\": {" + + "\r\n \"attributes\": [\"brandName\"]," + + "\r\n \"format\": \"keyValues\"," + + "\r\n \"endpoint\": {" + + "\r\n \"uri\": \"mqtt://localhost:1883/notify\"," + + "\r\n \"accept\": \"application/json\"," + + "\r\n \"notifierinfo\": {" + + "\r\n \"version\" : \"mqtt5.0\"," + + "\r\n \"qos\" : 0" + + "\r\n }" + + "\r\n }" + + "\r\n}" + + "\r\n}"; + + //@formatter:on + } + + + @After + public void tearDown() { + subscriptionEntityPayload=null; + + } + + /** + * this method is use for subscribe the entity + */ + + @Test + public void createSubscriptionEntityTest() { + try { + URI uri = new URI("urn:ngsi-ld:Subscription:211"); + when(subscriptionService.subscribe(any())).thenReturn(uri); + mockMvc.perform(post("/ngsi-ld/v1/subscriptions").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(subscriptionEntityPayload)). + andExpect(status().isCreated()); + verify(subscriptionService, times(1)).subscribe(any()); + + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is try to subscribe the entity having "BAD REQUEST" + */ + + @Test + public void createSubscriptionEntityBadRequestTest() { + try { + when(subscriptionService.subscribe(any())). + thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(post("/ngsi-ld/v1/subscriptions").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(subscriptionEntityPayload)).andExpect(status(). 
+ isBadRequest()).andExpect(jsonPath("$.title").value("Bad Request Data.")); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is use for the subscribe entity if subscribe entity already exists + */ + + @Test + public void createSubscriptionEntityAlreadyExistTest() { + try { + when(subscriptionService.subscribe(any())). + thenThrow(new ResponseException(ErrorType.AlreadyExists)); + mockMvc.perform(post("/ngsi-ld/v1/subscriptions").contentType(AppConstants.NGB_APPLICATION_JSON) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(subscriptionEntityPayload)).andExpect(status().isConflict()). + andExpect(jsonPath("$.title").value("Already exists.")); + } catch (Exception e) { + Assert.fail(); + } + } + + /** + * this method is used get the subscribe entity by ID. + */ + + @Test + public void getSubscriptionEntityTest() { + try { + List context = new ArrayList<>(); + Subscription subscription = null; + subscription = DataSerializer.getSubscription(subscriptionEntityPayload); + when(subscriptionService.getSubscription(any())).thenReturn(subscription); + mockMvc.perform(get("/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:211") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isOk()); + verify(subscriptionService, times(1)).getSubscription(any()); + } catch (Exception e) { + e.printStackTrace(); + } + } + + /** + * this method is used get the subscribe entity. + */ + + @Test + public void getSubscriptionEntity() { + try { + + mockMvc.perform(get("/ngsi-ld/v1/subscriptions/") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isOk()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is use for delete subscription + */ + + @Test + public void deleteSubscriptionTest() { + try { + mockMvc.perform(delete("/ngsi-ld/v1/subscriptions/{id}", "urn:ngsi-ld:Subscription:211") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isNoContent()); + verify(subscriptionService, times(1)).unsubscribe(any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is update the subscription + */ + + @Test + public void updateSubscriptionTest() { + try { + mockMvc.perform(patch("/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:211/") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(subscriptionEntityPayload)) + .andExpect(status().isNoContent()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + + } + + /** + * this method is update the subscription + */ + + @Test + public void updateSubscriptionNotFoundTest() { + try { + when(subscriptionService.updateSubscription(any())). + thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(patch("/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:211/") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(subscriptionEntityPayload)) + .andExpect(status().isNotFound()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + + } + + /** + * this method is used get the subscribe entity by Id. + * @throws Exception + */ + + @Test + public void getSubscriptionEntityById() throws Exception { + when(subscriptionService.getSubscription(any())). 
+ thenThrow(new ResponseException(ErrorType.NotFound)); + mockMvc.perform(get("/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:211") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isNotFound()); + + } +} diff --git a/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.java b/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..089bf7e1e0a4271f98e0f0e37f481379621ca89e --- /dev/null +++ b/scorpio-broker/Core/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.java @@ -0,0 +1,145 @@ +package eu.neclab.ngsildbroker.subscriptionmanager.service; + +import static org.junit.Assert.assertNotNull; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.MockitoAnnotations; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; + +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +@RunWith(SpringRunner.class) +@AutoConfigureMockMvc//(secure = false) + +public class SubscriptionServiceTest { + + @Mock + private KafkaOps kafkaOperations; + + @Mock + SubscriptionRequest subscriptionRequest; + + @Mock + IntervalNotificationHandler intervalHandlerREST; + + @Mock + HashMap subscriptionId2Subscription; + + @InjectMocks + private SubscriptionService manager; + + @Mock + Subscription subscriptionMock; + + @Mock + IntervalNotificationHandler intervalNotificationHandler; + + //resolved payload of a subscription. 
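	// This is the JSON-LD expanded form produced by context resolution: subscription
	// urn:ngsi-ld:Subscription:173223 for entities of the default-context type Vehicle,
	// notifying an HTTP endpoint http://localhost:8080/acc that accepts application/json.
	// Roughly the compacted equivalent (sketch, keys abbreviated) would be:
	// { "id": "urn:ngsi-ld:Subscription:173223", "type": "Subscription",
	//   "entities": [ { "type": "Vehicle" } ],
	//   "notification": { "endpoint": { "uri": "http://localhost:8080/acc",
	//                                   "accept": "application/json" } } }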
+ private String resolved="{\r\n \"https://uri.etsi.org/ngsi-ld/entities\" : [ " + + "{\r\n \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/default-context/Vehicle\" ]" + + "\r\n } ],\r\n \"@id\" : \"urn:ngsi-ld:Subscription:173223\"," + + "\r\n \"https://uri.etsi.org/ngsi-ld/notification\" : [ " + + "{\r\n \"https://uri.etsi.org/ngsi-ld/endpoint\" : [ " + + "{\r\n \"https://uri.etsi.org/ngsi-ld/accept\" : [ " + + "{\r\n \"@value\" : \"application/json\"" + + "\r\n } ],\r\n \"https://uri.etsi.org/ngsi-ld/uri\" : [ {" + + "\r\n \"@value\" : \"http://localhost:8080/acc\"\r\n } ]" + + "\r\n } ]\r\n } ],\r\n \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/Subscription\" ]" + + "\r\n}"; + + @Before + public void setUp() throws Exception { + System.out.println("method called"); + MockitoAnnotations.initMocks(this); + } + + /** + * this method is used to test create subscription test + */ + @Test + public void serviceTest() throws ResponseException { + List context = new ArrayList<>(); + Subscription subscription = null; + subscription = DataSerializer.getSubscription(resolved); + SubscriptionRequest subRequest = new SubscriptionRequest(subscription, context); + URI subId = manager.subscribe(subRequest); + verify(kafkaOperations, times(1)).pushToKafka(any(),any(),any()); + + } + + /** + * this method is used to test update subscription test + */ + @Test + public void updateSubscriptionTest() { + List context = new ArrayList<>(); + Subscription subscription = null; + subscription = DataSerializer.getSubscription(resolved); + SubscriptionRequest subRequest = new SubscriptionRequest(subscription, context); + when(subscriptionId2Subscription.get(any())).thenReturn(new Subscription()); + try { + manager.updateSubscription(subRequest); + } catch (Exception e) { + verify(subscriptionId2Subscription,times(1)).get(any()); + } +} + + + /** + * this method is used to test getAllSubscriptions method of SubscriptionService class + */ + @Test + public void getAllSubscriptionsTest() { + List result=manager.getAllSubscriptions(0); + assertNotNull(result); + } + + /** + * this method is used to test unsubscribe method of SubscriptionService class + */ + @Test + public void unsubscribeTest() throws URISyntaxException, ResponseException { + Subscription removedSub=new Subscription(); + URI id=new URI("urn:ngsi-ld:Subscription:173223"); + when(subscriptionId2Subscription.remove(any())).thenReturn(removedSub); + manager.unsubscribe(id); + //verify(kafkaOperations, times(1)).pushToKafka(any(),any(),any()); + verify(intervalNotificationHandler,times(1)).removeSub(any()); + } + + /** + * this method is used to test getSubscription method of SubscriptionService class + */ + @Test + public void getSubscriptionTest() { + String errorMessage=null; + try { + manager.getSubscription(""); + } catch (ResponseException e) { + errorMessage=e.getMessage(); + } + Assert.assertEquals(errorMessage, "Resource not found."); + } +} diff --git a/scorpio-broker/Core/TestManager/.gitignore b/scorpio-broker/Core/TestManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..870145a4fc121a32a371733f25a20bd8bcfa7f94 --- /dev/null +++ b/scorpio-broker/Core/TestManager/.gitignore @@ -0,0 +1,5 @@ +/target/ +/.settings/ +.classpath +.project +bin/ \ No newline at end of file diff --git a/scorpio-broker/Core/TestManager/pom.xml b/scorpio-broker/Core/TestManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..5dab8fd98e2262f7f01e5b4635cd6f6f9a2b0027 --- /dev/null +++ 
b/scorpio-broker/Core/TestManager/pom.xml @@ -0,0 +1,14 @@ + + 4.0.0 + TestManager + jar + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + 1.0.0-SNAPSHOT + \ No newline at end of file diff --git a/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/TestMgrHandler.java b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/TestMgrHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..ec6edb893f04e12821a615507e8257f0d8abb5b5 --- /dev/null +++ b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/TestMgrHandler.java @@ -0,0 +1,29 @@ +package eu.neclab.ngsildbroker.testManager; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.Bean; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; + + + + +@SpringBootApplication +public class TestMgrHandler {// implements QueryHandlerInterface{ + + public static void main(String[] args) { + SpringApplication.run(TestMgrHandler.class, args); + } + + + @Bean + SecurityConfig securityConfig() { + return new SecurityConfig(); + } + + @Bean + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } +} diff --git a/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/config/ResourceConfigurer.java b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/config/ResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..a1b948fb5c6be5cac842a6b266b4e422e23de9f7 --- /dev/null +++ b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/config/ResourceConfigurer.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.testManager.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. 
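 * No security rules are declared here; configure() delegates to the shared
 * ResourceConfigDetails bean from the commons module (ngbSecurityConfig), so every
 * service applies the same OAuth2 resource-server setup, with method-level
 * @PreAuthorize checks enabled through @EnableGlobalMethodSecurity.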
+ */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class ResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/controller/TestMgrController.java b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/controller/TestMgrController.java new file mode 100644 index 0000000000000000000000000000000000000000..be4cc0c62acd64b4e0680555c2f35f6f3ed487f9 --- /dev/null +++ b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/controller/TestMgrController.java @@ -0,0 +1,54 @@ +package eu.neclab.ngsildbroker.testManager.controller; + +import java.util.Enumeration; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.RestController; + + + +@RestController +public class TestMgrController {// implements QueryHandlerInterface { + + + //TODO + /** + * Method(POST) for "/ngsi-ld/v1/entities/" rest endpoint. + * + * @param payload + * jsonld message + * @return ResponseEntity object + */ + @RequestMapping(method = RequestMethod.POST, value = "/test") + @ResponseBody + public ResponseEntity createEntity(HttpServletRequest request, @RequestBody String payload) { + //URI result = null; + //ResponseEntity result = new ResponseEntity(); + + System.out.println("Headers:"); + Enumeration headerNames = request.getHeaderNames(); + while(headerNames.hasMoreElements()) { + String next = headerNames.nextElement(); + System.out.println("Key: " + next); + System.out.println("Values:"); + Enumeration headers = request.getHeaders(next); + while(headers.hasMoreElements()) { + System.out.println(headers.nextElement()); + } + System.out.println(); + } + System.out.println(payload); + String result = "Tested Successfully"; + + System.out.println(result); + return ResponseEntity.ok(result); + + } + +} diff --git a/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/services/TestMgrService.java b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/services/TestMgrService.java new file mode 100644 index 0000000000000000000000000000000000000000..882bda3c4c8ddbfc0b91b0b0a11c27a03edb8fcc --- /dev/null +++ b/scorpio-broker/Core/TestManager/src/main/java/eu/neclab/ngsildbroker/testManager/services/TestMgrService.java @@ -0,0 +1,25 @@ +package eu.neclab.ngsildbroker.testManager.services; + + +//@Service +public class TestMgrService { + + //TODOpublic class Greeting { + + private final long id; + private final String content; + + public TestMgrService(long id, String content) { + this.id = id; + this.content = content; + } + + public long getId() { + return id; + } + + public String getContent() { + return content; + } + +} diff --git a/scorpio-broker/Core/TestManager/src/main/resources/application.yml b/scorpio-broker/Core/TestManager/src/main/resources/application.yml new 
file mode 100644 index 0000000000000000000000000000000000000000..472347b974f732b58f040745cf5a6d3c075eac6d --- /dev/null +++ b/scorpio-broker/Core/TestManager/src/main/resources/application.yml @@ -0,0 +1,36 @@ +spring: + application: + name: testmanager + kafka: + admin: + properties: + cleanup: + policy: compact + cloud: + stream: + kafka: + binder: + # brokers: localhost:9092 + #bindings: + #ENTITY_READ_CHANNEL: + # destination: ENTITY + # contentType: application/json + # group: entity +server: + port: 1010 + tomcat: + max: + threads: 20 +#Entity-Manager properties +entity: + topic: ENTITY +query: + topic: QUERY + result: + topic: QUERY_RESULT +bootstrap: + servers: localhost:9092 +#enable log compaction + + + \ No newline at end of file diff --git a/scorpio-broker/Core/assembly.xml b/scorpio-broker/Core/assembly.xml new file mode 100644 index 0000000000000000000000000000000000000000..6b7914e298c331fa0885297d39bfbc41899ca4ca --- /dev/null +++ b/scorpio-broker/Core/assembly.xml @@ -0,0 +1,61 @@ + + NGSILDBrokerRelease + + dir + + true + + + + + + + + + + + org.osgi:org.osgi.compendium + eu.neclab.mediationgateway:* + org.easymock:easymock + net.sf.kxml:kxml2 + org.osgi:org.osgi.service.component.annotations + org.osgi:org.osgi.service.metatype.annotations + xmlpull:xmlpull + org.slf4j:slf4j-api + commons-logging:commons-logging + + ${artifact.artifactId}.${artifact.extension} + bundle + + + + + + + true + + eu.nec.leaf:eu.nec.leaf.webinterface:war + + + bundle + false + true + + org.osgi:org.osgi.service.component.annotations + org.osgi:org.osgi.service.metatype.annotations + eu.neclab.mediationgateway:* + org.easymock:easymock + org.osgi:org.osgi.compendium + net.sf.kxml:kxml2 + xmlpull:xmlpull + org.slf4j:slf4j-api + commons-logging:commons-logging + + ${artifact.artifactId}.${artifact.extension} + + + + diff --git a/scorpio-broker/Core/pom.xml b/scorpio-broker/Core/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..340bc74bf3e1f8de363c8530593333b406187f01 --- /dev/null +++ b/scorpio-broker/Core/pom.xml @@ -0,0 +1,23 @@ + + 4.0.0 + eu.neclab.ngsildbroker.core + Core + pom + + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../BrokerParent + + + + EntityManager + QueryManager + SubscriptionManager + TestManager + AtContextServer + InfoServer + + diff --git a/scorpio-broker/EntityManager/target/classes/META-INF/jandex.idx b/scorpio-broker/EntityManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..ca7b9a92bd8b1876e46677c4c90c8aea4da0ba0d Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/EntityManager/target/classes/application-activemq.properties b/scorpio-broker/EntityManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..4238b907dc46be4964daf2131ab5aaa99a0181b3 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/application-activemq.properties @@ -0,0 +1,19 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel 
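# The registryretrieve channel is consumed through the smallrye-camel connector; with
# mysettings.messageconnection.protocol=activemq the endpoint URI below resolves to
# activemq:<registry topic>, and camel.component.activemq.broker-url points the Camel
# ActiveMQ component at the bus configured above (default localhost:61616).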
+mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.entity.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.outgoing.entity.connector=smallrye-camel + + diff --git a/scorpio-broker/EntityManager/target/classes/application-kafka.properties b/scorpio-broker/EntityManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..0c639c9aece61918728ab308a92d0c80fb2f6d32 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/application-kafka.properties @@ -0,0 +1,14 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +scorpio.messaging.maxSize=1048576 +#Kafka settings +kafka.bootstrap.servers=${bootstrap.servers} +#readability block########### +mp.messaging.outgoing.entity.topic=${scorpio.topics.entity} +mp.messaging.outgoing.entity.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.topic=${scorpio.topics.registry} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + diff --git a/scorpio-broker/EntityManager/target/classes/application-mqtt.properties b/scorpio-broker/EntityManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..f8a91ec40cd0ae2ac290409c87e02ecabd64899e --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/application-mqtt.properties @@ -0,0 +1,18 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.entity.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.outgoing.entity.connector=smallrye-camel + + diff --git a/scorpio-broker/EntityManager/target/classes/application-rabbitmq.properties b/scorpio-broker/EntityManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..ab7aefd3119b89b2f7570da4ab93a3611c6d67ad --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,19 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + 
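# RabbitMQ profile: the bus host/port above default to localhost:5672; the protocol set
# below routes the Camel endpoints through the rabbitmq component, and the message size
# cap further down (scorpio.messaging.maxSize=134217728, i.e. 128 MiB) appears to be
# aligned with RabbitMQ's default maximum message size.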
+mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.entity.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.outgoing.entity.connector=smallrye-camel +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + diff --git a/scorpio-broker/EntityManager/target/classes/application-sqs.properties b/scorpio-broker/EntityManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..7105cbb2ea2e7bedea74f296ab75bdf4c8a6dd2c --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/application-sqs.properties @@ -0,0 +1,20 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +#camel.component.aws2-sqs.maximum-message-size=10485760 +mysettings.messageconnection.protocol=sns-fanout + +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.entity.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.outgoing.entity.connector=smallrye-camel + + + diff --git a/scorpio-broker/EntityManager/target/classes/application.properties b/scorpio-broker/EntityManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..03fd4ac212b6d151405e3150f2b711ea302180a2 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/application.properties @@ -0,0 +1,45 @@ +quarkus.application.name=entity-manager +quarkus.http.port=1025 +quarkus.log.level=INFO +quarkus.log.category."org.apache.kafka".level=ERROR +quarkus.ssl.native=true +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +scorpio.at-context-server=http://at-context-server:2023 +atcontext.url=${scorpio.at-context-server}/ngsi-ld/v1/jsonldContexts/ 
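# The defaults above can be overridden at start-up: bushost/busport select the Kafka bus,
# dbhost/dbport/dbuser/dbpass/dbname select the Postgres instance, and the entity manager
# itself listens on quarkus.http.port=1025.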
+bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.entity.max-limit=1000 +scorpio.entity.default-limit=50 +scorpio.entity.batch-operations.create.max=1000 +scorpio.entity.batch-operations.upsert.max=1000 +scorpio.entity.batch-operations.update.max=1000 +scorpio.entity.batch-operations.delete.max=1000 +scorpio.entity.batch-operations.query.max=1000 +scorpio.topics.entity=ENTITY +scorpio.topics.registry=REGISTRY +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
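	-- The trigger mirrors selected expanded NGSI-LD members into plain columns: the entity
	-- type from '{@type,0}', createdAt/modifiedAt from their '@value' timestamps, and the
	-- location/observationSpace/operationSpace GeoProperties into PostGIS geometries
	-- (SRID 4326). A filter such as
	--   SELECT id FROM entity WHERE type = 'https://uri.etsi.org/ngsi-ld/default-context/Vehicle'
	-- can then be served by i_entity_type instead of the GIN index on the full JSONB document.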
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX i_csource_expires ON 
csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER 
csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just 
an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git 
a/scorpio-broker/EntityManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + 
SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger 
AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( 
l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
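+                    -- Illustration (editorial sketch; the entity id and type below are invented, not taken from this repo):
+                    -- given an entities element {"@id": "urn:ngsi-ld:Vehicle:A456", "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"]},
+                    -- the INSERT that follows stores entity_id = 'urn:ngsi-ld:Vehicle:A456' and entity_type = the first expanded @type IRI,
+                    -- while entity_idpattern stays NULL, because the CASE falls back to idPattern only when @id is absent.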
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF 
(NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20211217.1__subscription_table.sql 
b/scorpio-broker/EntityManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
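+            -- Editorial note: the data column holds the expanded JSON-LD document, so the extractions below use
+            -- PostgreSQL's #>> path operator to pull text values, e.g. '{@type,0}' is the first element of the
+            -- @type array and '{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}' is the @value of the first
+            -- createdAt node; the #> operator used elsewhere returns jsonb rather than text.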
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + 
l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT 
array_append(scopes,'{-1}', (i#>'{@value}')) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION 
csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
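+                    -- Editorial note: the attribute inserts further below read propertyNames and relationshipNames,
+                    -- whereas earlier versions of this function (e.g. V20190704.1 above) read properties and relationships;
+                    -- this presumably tracks the renamed RegistrationInfo members in newer NGSI-LD API revisions.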
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- 
/dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + e_type text, + e_prop 
text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + 
NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
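  -- NGSILD_DELETEATTRIB keeps every instance of attribName whose datasetId does not match
  -- deleteDatasetId; if any instances remain they are written back, otherwise the whole
  -- attribute is dropped. Note that '{attribName}' above is a literal jsonb path (a key
  -- named "attribName"), not the variable; addressing the actual attribute would need
  -- ARRAY[attribName], as NGSILD_PARTIALUPDATE does.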
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION 
NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN 
DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); 
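  -- The loop below reads as NGSI-LD merge-patch semantics: an attribute whose hasValue or
  -- hasObject carries the null sentinel "urn:ngsi-ld:null" is removed from the stored
  -- entity, while any other key in the patch replaces or adds the attribute; the pre-patch
  -- entity is what gets returned to the caller.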
+ -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": 
"geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": 
"ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + 
operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT EXISTS 
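-- GiST index on the new location column so spatial filters on the temporal table can use an index scan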
i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
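-- if the attribute already existed, carry its original createdAt over into the merged instance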
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
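-- no modifiedAt in the incoming values: stamp both the entity and the merged instance with the current UTC time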
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": 
"ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": 
"ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20230904.1__fixsubs.sql @@ -0,0 +1 @@ 
+UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', entityId, 'old', 
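-- return the deleted entity body as the 'old' value in the batch result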
prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
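-- the payload declares entity types, so refresh e_types together with the attribute merge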
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
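-- a @type in the payload: try a plain INSERT first; the unique_violation handler below turns it into an update of the existing entity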
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + 
"ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": 
"ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": 
"ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + 
"VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = 
newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
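-- Illustrative usage (a sketch; the payload is a hypothetical expanded NGSI-LD entity):
-- ngsild_appendbatch and ngsild_upsertbatch both take a jsonb array of expanded entities and report
-- per-entity success/failure, e.g.
--   SELECT ngsild_upsertbatch(
--     '[{"@id": "urn:ngsi-ld:Vehicle:A", "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"]}]'::jsonb,
--     false);
-- With do_replace = true an existing entity body is replaced; otherwise the new attributes are
-- merged into the stored entity.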
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
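-- Illustrative note (a sketch): getmode() maps a registration mode string to the smallint stored in
-- csourceinformation.reg_mode: 'auxiliary' -> 0, 'inclusive' -> 1, 'redirect' -> 2, 'exclusive' -> 3,
-- with 'inclusive' (1) as the fallback. For example:
--   SELECT getmode('exclusive');       -- 3
--   SELECT getmode('something-else');  -- 1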
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
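-- Illustrative note (a sketch): getoperations() expands the operation names found in a
-- registration's operations array into a 41-slot boolean[]; slot n feeds the n-th operation column
-- of csourceinformation (createEntity = 1 ... retrieveEntityMap = 41, in the order used by the
-- INSERT statements below). Group names expand to several slots, e.g. (hypothetical input):
--   SELECT getoperations('[{"@value": "retrieveOps"}]'::jsonb);
--   -- -> flags 21 (retrieveEntity) and 22 (queryEntity) set to true, all others false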
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
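-- Descriptive note (a sketch): for each piece of registration metadata (location, scope, tenant,
-- operations, contextSourceInfo, mode, expires) the trigger checks both the expanded core term,
-- e.g. 'https://uri.etsi.org/ngsi-ld/mode', and its default-context variant under
-- 'https://uri.etsi.org/ngsi-ld/default-context/', so registrations expanded against either context
-- are picked up; when neither key is present a default is used (e.g. inclusive mode).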
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
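-- Descriptive note (a sketch): for registrations in redirect or exclusive mode (regMode > 1) the
-- trigger refuses to register an entity, property, or relationship that already matches a local
-- entity, raising an exception with ERRCODE '23514'; auxiliary and inclusive registrations skip
-- these conflict checks and are inserted directly.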
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": 
[{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": "ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", 
+ + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + "@id": "ngsi-ld:hasLanguageMaps", + + "@container": 
"@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": "ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + 
+ "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
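-- Descriptive note (a sketch): throughout these merge helpers the sentinel 'urn:ngsi-ld:null' marks
-- an element for deletion, following the NGSI-LD merge-patch convention; for language maps the pair
-- {"@language": "@none", "@value": "urn:ngsi-ld:null"} removes the whole languageMap, while a
-- 'urn:ngsi-ld:null' value for a specific language removes only that language entry.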
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
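+  -- arr_idx: position in old_attrib adjusted for elements already removed
+  -- in this pass (index - removed); expanded hasValue arrays typically look
+  -- like [{"@value": 23}], and a literal "urn:ngsi-ld:null" value deletes
+  -- the corresponding entry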
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
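+        -- merge_has_object returning NULL deletes the whole relationship
+        -- instance (handled by the return above); otherwise the merged
+        -- hasObject array is written back under its key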
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
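+        -- any other key on the GeoProperty is a sub-attribute: it is merged
+        -- recursively via merge_attrib and the result stored under that key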
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
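+  -- match every incoming instance against the stored ones by datasetId
+  -- (a missing datasetId only matches another missing datasetId), then merge
+  -- the pair or append the new instance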
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
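+        -- record the datasetIds that merge_attrib reports as updated,
+        -- appending to any list already kept for this attribute key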
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
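+    -- a Polygon needs coordinates expanding to an @list whose first element
+    -- is itself an @list (a linear ring of positions)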
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
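+      -- every polygon inside a MultiPolygon must show the same nested
+      -- @list-of-@list structure before its rings are walked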
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/EntityManager/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/EntityManager/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.class new file mode 100644 index 0000000000000000000000000000000000000000..b466a8e094def9cb39b0cde098ce5025f9519715 Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.class differ diff --git 
a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.class new file mode 100644 index 0000000000000000000000000000000000000000..b204966fea3b2c61bf8df788f5876b0de01368a2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.class differ diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingBase.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingBase.class new file mode 100644 index 0000000000000000000000000000000000000000..2a31b007c84083112f9bc0662fb3fb94a8378d4a Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingBase.class differ diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingByteArray.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..5fb0a1bfe453bc363a010105933b85637d2e2069 Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingByteArray.class differ diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingInMemory.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingInMemory.class new file mode 100644 index 0000000000000000000000000000000000000000..7d70960b8d2fca65b7cc92850e53da6143b47108 Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingInMemory.class differ diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingString.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingString.class new file mode 100644 index 0000000000000000000000000000000000000000..52e292732884501303146cc4f845fc9136716bde Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingString.class differ diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.class new file mode 100644 index 0000000000000000000000000000000000000000..52c06ade2c9d14c0d59841d3cc24425ea3ab467c Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.class differ diff --git a/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/services/EntityService.class b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/services/EntityService.class new file mode 100644 index 0000000000000000000000000000000000000000..10faade08a32f119be5bef705281aec7f4e492a3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/classes/eu/neclab/ngsildbroker/entityhandler/services/EntityService.class 
differ diff --git a/scorpio-broker/EntityManager/target/entity-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/EntityManager/target/entity-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..679cc6c76ba0f319948e07bfacec03ac906f9668 Binary files /dev/null and b/scorpio-broker/EntityManager/target/entity-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/EntityManager/target/maven-archiver/pom.properties b/scorpio-broker/EntityManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..58f13719da1154b720c1c283f109ac2439836e8c --- /dev/null +++ b/scorpio-broker/EntityManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:27:28 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=entity-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..7620c1f7c9260195b781e4b72bb36ded723c2752 --- /dev/null +++ b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,8 @@ +eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingString.class +eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.class +eu/neclab/ngsildbroker/entityhandler/services/EntityService.class +eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingBase.class +eu/neclab/ngsildbroker/entityhandler/controller/EntityController.class +eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.class +eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingByteArray.class +eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingInMemory.class diff --git a/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..af9a24b19c3e184fa76139d13f9716d7d7bbabb9 --- /dev/null +++ b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,8 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingInMemory.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/messaging/EntityMessagingString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityInfoDAO.java 
+/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/main/java/eu/neclab/ngsildbroker/entityhandler/services/EntityService.java diff --git a/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..c55e802018f59e4d86174bfad3f2c61a80befe53 --- /dev/null +++ b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,4 @@ +eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.class +eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchControllerTest.class +eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.class +eu/neclab/ngsildbroker/entityhandler/controller/CustomProfile.class diff --git a/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..44be1ae8251c29160b08e236d15c01d30db4e83d --- /dev/null +++ b/scorpio-broker/EntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,5 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/EntityManager/src/test/java/eu/neclab/ngsildbroker/entityhandler/validationutil/ValidationUtilTest.java diff --git a/scorpio-broker/EntityManager/target/quarkus-app/app/entity-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/EntityManager/target/quarkus-app/app/entity-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..4e16f962bdba4d55801ff7fa650539af4525ed3d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/app/entity-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar 
differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new 
file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 
index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar 
differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 
0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 
0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git 
a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar 
b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 
0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 
0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/EntityManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/EntityManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 
+io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 
+io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 
+jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/EntityManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/EntityManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 
0000000000000000000000000000000000000000..0910187ca4538af4235d288a5afb76c234c0c5ce Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/EntityManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3d73f93a64ca2bd2d9e42912b70855c683ad246 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/EntityManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..bbd45906a9c3f5a6657e9ca4598254470f27ff77 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/EntityManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/EntityManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..b6a17631e634a659b76206a07af54c803a5a0745 Binary files /dev/null and b/scorpio-broker/EntityManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/EntityManager/target/quarkus-artifact.properties b/scorpio-broker/EntityManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..f0dd1db8c01b870871b7887c8d4178781f5deee5 --- /dev/null +++ b/scorpio-broker/EntityManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/entity-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/CustomProfile.class b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..ea2b4d30fd24495ee01f100c8a1ed1bbda846b40 Binary files /dev/null and b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/CustomProfile.class differ diff --git a/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchControllerTest.class b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..904a5d9160c895baf9ab10f03809d70acbcfbd06 Binary files /dev/null and b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityBatchControllerTest.class differ diff --git a/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.class b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..14c2d10f6f3510d14025cc2230ab6e4f8cfeeca7 Binary files /dev/null and b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/controller/EntityControllerTest.class differ diff --git 
a/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.class b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.class new file mode 100644 index 0000000000000000000000000000000000000000..ce7cbb58c2a4e4473d0c05cc76f90e0d4f5c5362 Binary files /dev/null and b/scorpio-broker/EntityManager/target/test-classes/eu/neclab/ngsildbroker/entityhandler/services/EntityServiceTest.class differ diff --git a/scorpio-broker/Examples/Scorpio Broker_latest.postman_collection.json b/scorpio-broker/Examples/Scorpio Broker_latest.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..8c5dfc1dec1e687edf074d712a7b4ab84a1f2aef --- /dev/null +++ b/scorpio-broker/Examples/Scorpio Broker_latest.postman_collection.json @@ -0,0 +1,2188 @@ +{ + "info": { + "_postman_id": "cbf80864-a29e-4bda-b15f-c5c3ff4932e3", + "name": "Scorpio Broker_latest", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Entities", + "item": [ + { + "name": "Create entity with Link header", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"id\": \"urn:ngsi-ld:Vehicle:A100\",\r\n\t\"type\": \"Vehicle\",\r\n\t\"brandName\": {\r\n\t\t\"type\": \"Property\",\r\n\t\t\"value\": \"Mercedes\"\r\n\t},\r\n\t\"isParked\": {\r\n\t\t\"type\": \"Relationship\",\r\n\t\t\"object\": \"urn:ngsi-ld:OffStreetParking:Downtown1\",\r\n\t\t\"observedAt\": \"2017-07-29T12:00:04\",\r\n\t\t\"providedBy\": {\r\n\t\t\t\"type\": \"Relationship\",\r\n\t\t\t\"object\": \"urn:ngsi-ld:Person:Bob\"\r\n\t\t}\r\n\t},\r\n\t\"speed\": {\r\n\t\t\"type\": \"Property\",\r\n\t\t\"value\": 80\r\n\t},\r\n\t\"createdAt\": \"2017-07-29T12:00:04\",\r\n\t\"location\": {\r\n\t\t\"type\": \"GeoProperty\",\r\n\t\t\"value\": {\r\n\t\t\t\"type\": \"Point\",\r\n\t\t\t\"coordinates\": [-8.5, 41.2]\r\n\t\t}\r\n\t}\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + } + }, + "response": [] + }, + { + "name": "Create entity - context in payload", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"@context\": [{\r\n\t\t\"Vehicle\": \"http://example.org/vehicle/Vehicle\",\r\n\t\t\"brandName\": \"http://example.org/vehicle/brandName\",\r\n\t\t\"speed\": \"http://example.org/vehicle/speed\",\r\n\t\t\"isParked\": {\r\n\t\t\t\"@type\": \"@id\",\r\n\t\t\t\"@id\": \"http://example.org/common/isParked\"\r\n\t\t},\r\n\t\t\"providedBy\": {\r\n\t\t\t\"@type\": \"@id\",\r\n\t\t\t\"@id\": \"http://example.org/common/providedBy\"\r\n\t\t}\r\n\t}],\r\n\t\"id\": \"urn:ngsi-ld:Vehicle:A4580\",\r\n\t\"type\": \"Vehicle\",\r\n\t\"brandName\": {\r\n\t\t\"type\": \"Property\",\r\n\t\t\"value\": \"Mercedes\"\r\n\t},\r\n\t\"isParked\": {\r\n\t\t\"type\": \"Relationship\",\r\n\t\t\"object\": \"urn:ngsi-ld:OffStreetParking:Downtown1\",\r\n\t\t\"observedAt\": \"2017-07-29T12:00:04\",\r\n\t\t\"providedBy\": 
{\r\n\t\t\t\"type\": \"Relationship\",\r\n\t\t\t\"object\": \"urn:ngsi-ld:Person:Bob\"\r\n\t\t}\r\n\t},\r\n\t\"speed\": {\r\n\t\t\"type\": \"Property\",\r\n\t\t\"value\": 80\r\n\t},\r\n\t\"createdAt\": \"2017-07-29T12:00:04\",\r\n\t\"location\": {\r\n\t\t\"type\": \"GeoProperty\",\r\n\t\t\"value\": {\r\n\t\t\t\"type\": \"Polygon\",\r\n\t\t\t\"coordinates\": [\r\n\t\t\t\t[\r\n\t\t\t\t\t[8.686752319335938, 49.359122687528746],\r\n\t\t\t\t\t[8.742027282714844, 49.3642654834877],\r\n\t\t\t\t\t[8.767433166503904, 49.398462568451485],\r\n\t\t\t\t\t[8.768119812011719, 49.42750021620163],\r\n\t\t\t\t\t[8.74305725097656, 49.44781634951542],\r\n\t\t\t\t\t[8.669242858886719, 49.43754770762113],\r\n\t\t\t\t\t[8.63525390625, 49.41968407776289],\r\n\t\t\t\t\t[8.637657165527344, 49.3995797187007],\r\n\t\t\t\t\t[8.663749694824219, 49.36851347448498],\r\n\t\t\t\t\t[8.686752319335938, 49.359122687528746]\r\n\t\t\t\t]\r\n\t\t\t]\r\n\t\t}\r\n\t}\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + } + }, + "response": [] + }, + { + "name": "Append Entity request", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"@context\": {\r\n\t\t\"brandName1\": \"http://example.org/vehicle/brandName1\"\r\n\t},\r\n\t\"brandName1\": {\r\n\t\t\"type\": \"Property\",\r\n\t\t\"value\": \"BMW\"\r\n\t}\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100/attrs", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Vehicle:A100", + "attrs" + ] + }, + "description": "Add new attributes in existing entity" + }, + "response": [] + }, + { + "name": "Update entity request", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"@context\": {\r\n\t\t\"brandName1\": \"http://example.org/vehicle/brandName1\"\r\n\t},\r\n\t\"brandName1\": {\r\n\t\t\"type\": \"Property\",\r\n\t\t\"value\": \"AUDI\"\r\n\t}\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100/attrs", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Vehicle:A100", + "attrs" + ] + }, + "description": "update the attribute of entity" + }, + "response": [] + }, + { + "name": "partial update request", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"@context\": {\r\n\t\t\"brandName\": \"http://example.org/vehicle/brandName\"\r\n\t},\r\n\t\"value\": \"BMW\"\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100/attrs/brandName", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Vehicle:A100", + "attrs", + "brandName" + ] + }, + "description": "partially update the attribute of entity" + }, + "response": [] + }, + { + "name": "Delete attribute", + "request": { + "method": "DELETE", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": 
"{{gatewayServer}}/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100/attrs/brandName1", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Vehicle:A100", + "attrs", + "brandName1" + ] + } + }, + "response": [] + }, + { + "name": "Delete entity", + "request": { + "method": "DELETE", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A100", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Vehicle:A100" + ] + } + }, + "response": [] + }, + { + "name": "Create entity expanded", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"http://example.org/vehicle/brandName\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"Mercedes\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/createdAt\": [\r\n {\r\n \"@type\": \"http://uri.etsi.org/ngsi-ld/DateTime\",\r\n \"@value\": \"2017-07-29T12:00:04\"\r\n }\r\n ],\r\n \"@id\": \"urn:ngsi-ld:Vehicle:A8866\",\r\n \"http://example.org/common/isParked\": [\r\n {\r\n \"http://uri.etsi.org/ngsi-ld/hasObject\": [\r\n {\r\n \"@id\": \"urn:ngsi-ld:OffStreetParking:Downtown1\"\r\n }\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/observedAt\": [\r\n {\r\n \"@type\": \"http://uri.etsi.org/ngsi-ld/DateTime\",\r\n \"@value\": \"2017-07-29T12:00:04\"\r\n }\r\n ],\r\n \"http://example.org/common/providedBy\": [\r\n {\r\n \"http://uri.etsi.org/ngsi-ld/hasObject\": [\r\n {\r\n \"@id\": \"urn:ngsi-ld:Person:Bob\"\r\n }\r\n ],\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Relationship\"\r\n ]\r\n }\r\n ],\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Relationship\"\r\n ]\r\n }\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/location\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/GeoProperty\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"{ \\\"type\\\":\\\"Point\\\", \\\"coordinates\\\":[ -8.5, 41.2 ] }\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/vehicle/speed\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": 80\r\n }\r\n ]\r\n }\r\n ],\r\n \"@type\": [\r\n \"http://example.org/vehicle/Vehicle\"\r\n ]\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + } + }, + "response": [] + }, + { + "name": "Create entity all_datatypes", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"@context\": [\r\n {\r\n \"numberExample\": \"http://example.org/numberExample\",\r\n \"stringExample\": \"http://example.org/stringExample\",\r\n \"dateTimeExample\": \"http://example.org/dateTimeExample\",\r\n \"dateExample\": \"http://example.org/dateExample\",\r\n \"timeExample\": \"http://example.org/timeExample\",\r\n \"otherValueExample\": 
\"http://example.org/otherValueExample\",\r\n \"trueExample\": \"http://example.org/trueExample\",\r\n \"falseExample\": \"http://example.org/falseExample\",\r\n \"nullExample\": \"http://example.org/nullExample\",\r\n \"uriExample\": \"http://example.org/uriExample\",\r\n \"structuredExample1\": \"http://example.org/structuredExample1\",\r\n \"structuredExample2\": \"http://example.org/structuredExample2\",\r\n \"topLevelExample\": \"http://example.org/topLevelExample\",\r\n \"subPropertyExample\": \"http://example.org/subPropertyExample\",\r\n \"relationshipExample\": \"http://example.org/relationshipExample\"\r\n }\r\n ],\r\n \"id\": \"urn:ngsi-ld:Test:all_datatypes\",\r\n \"type\": \"urn:ngsi-ld:Test\",\r\n \"numberExample\": {\r\n \"type\": \"Property\",\r\n \"value\": 100\r\n },\r\n \"stringExample\": {\r\n \"type\": \"Property\",\r\n \"value\": \"Mercedes\"\r\n },\r\n \"dateTimeExample\": {\r\n \"type\": \"Property\",\r\n \"value\": \"TODO\"\r\n },\r\n \"dateExample\": {\r\n \"type\": \"Property\",\r\n \"value\": \"TODO\"\r\n },\r\n \"timeExample\": {\r\n \"type\": \"Property\",\r\n \"value\": \"TODO\"\r\n },\r\n \"otherValueExample\": {\r\n \"type\": \"Property\",\r\n \"value\": true\r\n },\r\n \"trueExample\": {\r\n \"type\": \"Property\",\r\n \"value\": true\r\n },\r\n \"falseExample\": {\r\n \"type\": \"Property\",\r\n \"value\": false\r\n },\r\n \"nullExample\": {\r\n \"type\": \"Property\",\r\n \"value\": null\r\n },\r\n \"uriExample\": {\r\n \"type\": \"Property\",\r\n \"value\": \"http://www.example.com\"\r\n },\r\n \"topLevelExample\": {\r\n \"type\": \"Property\",\r\n \"value\": 10,\r\n \"subPropertyExample\": {\r\n \"type\": \"Property\",\r\n \"value\": 5\r\n }\r\n },\r\n \"relationshipExample\": {\r\n \"type\": \"Relationship\",\r\n \"object\": \"urn:ngsi-ld:relationshipExample\"\r\n }\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + } + }, + "response": [] + }, + { + "name": "Create entity all_datatypes expanded", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"http://example.org/dateExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"TODO\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/dateTimeExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"TODO\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/falseExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": false\r\n }\r\n ]\r\n }\r\n ],\r\n \"@id\": \"urn:ngsi-ld:Test:all_datatypes\",\r\n \"http://example.org/nullExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ]\r\n }\r\n ],\r\n \"http://example.org/numberExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": 100\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/otherValueExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n 
\"@value\": true\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/relationshipExample\": [\r\n {\r\n \"http://uri.etsi.org/ngsi-ld/hasObject\": [\r\n {\r\n \"@id\": \"urn:ngsi-ld:relationshipExample\"\r\n }\r\n ],\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Relationship\"\r\n ]\r\n }\r\n ],\r\n \"http://example.org/stringExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"Mercedes\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/structuredExample1\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {},\r\n {}\r\n ]\r\n }\r\n ],\r\n \"http://example.org/structuredExample2\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@id\": \"urn:ngsi-ld:Test:street\",\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"Franklinstrasse\"\r\n }\r\n ]\r\n },\r\n {\r\n \"@id\": \"urn:ngsi-ld:Test:number\",\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": 55\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/timeExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"TODO\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/topLevelExample\": [\r\n {\r\n \"http://example.org/subPropertyExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": 5\r\n }\r\n ]\r\n }\r\n ],\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": 10\r\n }\r\n ]\r\n }\r\n ],\r\n \"http://example.org/trueExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": true\r\n }\r\n ]\r\n }\r\n ],\r\n \"@type\": [\r\n \"urn:ngsi-ld:Test\"\r\n ],\r\n \"http://example.org/uriExample\": [\r\n {\r\n \"@type\": [\r\n \"http://uri.etsi.org/ngsi-ld/Property\"\r\n ],\r\n \"http://uri.etsi.org/ngsi-ld/hasValue\": [\r\n {\r\n \"@value\": \"http://www.example.com\"\r\n }\r\n ]\r\n }\r\n ]\r\n }" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Queries", + "item": [ + { + "name": "Retrieve specific entity", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "name": "Content-Type", + "value": "application/ld+json", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities/urn:ngsi-ld:Vehicle:A4569", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Vehicle:A4569" + ] + }, + "description": "Retrieve specific entity from kafka" + }, + "response": [] + }, + { + "name": "Retrieve by attribute", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/ld+json", + "type": "text" + } + ], + "url": { + "raw": 
"{{gatewayServer}}/ngsi-ld/v1/entities?attrs=http://example.org/vehicle/brandName", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "attrs", + "value": "http://example.org/vehicle/brandName" + } + ] + } + }, + "response": [] + }, + { + "name": "Query by id", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?id=urn:ngsi-ld:Vehicle:A4569&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "id", + "value": "urn:ngsi-ld:Vehicle:A4569" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + } + }, + "response": [] + }, + { + "name": "Query by type", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + } + }, + "response": [] + }, + { + "name": "Query by type using Link header", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?type=Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "type", + "value": "Vehicle" + } + ] + } + }, + "response": [] + }, + { + "name": "Query by idPattern", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?idPattern=urn:ngsi-ld:Vehicle:A.*&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "idPattern", + "value": "urn:ngsi-ld:Vehicle:A.*" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Advanced queries", + "item": [ + { + "name": "Number equal", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=numberExample==100&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "numberExample==100" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "Number greater", + "request": { + "method": "GET", + "header": [ + { + "key": 
"Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=numberExample>99&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "numberExample>99" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "String equal", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=stringExample==\"Mercedes\"&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "stringExample==\"Mercedes\"" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "String unequal", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=stringExample!=\"Mercedes\"&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "stringExample!=\"Mercedes\"" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "bool false equal", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=falseExample==false&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "falseExample==false" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "bool true equal", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=trueExample==true&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "trueExample==true" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "URI equal", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", 
+ "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=uriExample==http://www.example.com&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "uriExample==http://www.example.com" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "URI equal (using String)", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=uriExample==\"http://www.example.com\"&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "uriExample==\"http://www.example.com\"" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "andOp example", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=trueExample==true;numberExample<=130;stringExample==\"Mercedes\"&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "trueExample==true;numberExample<=130;stringExample==\"Mercedes\"" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "AttrPath (property of property)", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=topLevelExample.subPropertyExample>4&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "topLevelExample.subPropertyExample>4" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + }, + { + "name": "AttrPath (relationship of relat...)", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=isParked.providedBy==urn:ngsi-ld:Person:Bob&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "isParked.providedBy==urn:ngsi-ld:Person:Bob" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + 
"response": [] + }, + { + "name": "AttrPath (4 levels of properties)", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?q=manyLevelsExample.sub1.sub2.sub3==\"D\"&type=urn:ngsi-ld:Test", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "q", + "value": "manyLevelsExample.sub1.sub2.sub3==\"D\"" + }, + { + "key": "type", + "value": "urn:ngsi-ld:Test" + } + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Geoqueries", + "item": [ + { + "name": "Near", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "disabled": true + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=near;maxDistance==360&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "near;maxDistance==360" + }, + { + "key": "geometry", + "value": "Point" + }, + { + "key": "coordinates", + "value": "%5B8.684783577919006%2C49.406131991436396%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity near NEC Labs Heidelberg, with distance up to 360 meters\r\n NEC Labs Heidelberg coordinates: 8.684783577919006, 49.406131991436396" + }, + "response": [] + }, + { + "name": "Near (parkingLotLocation property)", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=parkingLotLocation&georel=near;maxDistance==360&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "parkingLotLocation" + }, + { + "key": "georel", + "value": "near;maxDistance==360" + }, + { + "key": "geometry", + "value": "Point" + }, + { + "key": "coordinates", + "value": "%5B8.684783577919006%2C49.406131991436396%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Testing a property other than \"location\"" + }, + "response": [] + }, + { + "name": "Near minDistance", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": 
"{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=near;minDistance==400&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "near;minDistance==400" + }, + { + "key": "geometry", + "value": "Point" + }, + { + "key": "coordinates", + "value": "%5B8.684783577919006%2C49.406131991436396%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity near NEC Labs Heidelberg, with distance over 400 meters" + }, + "response": [] + }, + { + "name": "Near Polygon", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=near;maxDistance==50&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "near;maxDistance==50" + }, + { + "key": "geometry", + "value": "Polygon" + }, + { + "key": "coordinates", + "value": "%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity near NEC building (polygon), with distance up to 360 meters" + }, + "response": [] + }, + { + "name": "Within", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=within&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "within" + }, + { + "key": "geometry", + "value": "Polygon" + }, + { + "key": "coordinates", + "value": "%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity within NEC Labs building (polygon) in Heidelberg" + }, + "response": [] + }, + { + "name": "Within MultiPolygon", + "request": { 
+ "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=within&geometry=MultiPolygon&coordinates=%5B%5B%5B%5B8.686752319335938%2C49.359122687528746%5D%2C%5B8.742027282714844%2C49.3642654834877%5D%2C%5B8.767433166503904%2C49.398462568451485%5D%2C%5B8.768119812011719%2C49.42750021620163%5D%2C%5B8.74305725097656%2C49.44781634951542%5D%2C%5B8.669242858886719%2C49.43754770762113%5D%2C%5B8.63525390625%2C49.41968407776289%5D%2C%5B8.637657165527344%2C49.3995797187007%5D%2C%5B8.663749694824219%2C49.36851347448498%5D%2C%5B8.686752319335938%2C49.359122687528746%5D%5D%5D%2C%5B%5B%5B8.364715576171875%2C48.96939999849952%5D%2C%5B8.47320556640625%2C48.982019588328214%5D%2C%5B8.485565185546875%2C49.017157315497165%5D%2C%5B8.411407470703125%2C49.05677012268616%5D%2C%5B8.33587646484375%2C49.031565622700356%5D%2C%5B8.320770263671875%2C48.98562459864604%5D%2C%5B8.364715576171875%2C48.96939999849952%5D%5D%5D%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "within" + }, + { + "key": "geometry", + "value": "MultiPolygon" + }, + { + "key": "coordinates", + "value": "%5B%5B%5B%5B8.686752319335938%2C49.359122687528746%5D%2C%5B8.742027282714844%2C49.3642654834877%5D%2C%5B8.767433166503904%2C49.398462568451485%5D%2C%5B8.768119812011719%2C49.42750021620163%5D%2C%5B8.74305725097656%2C49.44781634951542%5D%2C%5B8.669242858886719%2C49.43754770762113%5D%2C%5B8.63525390625%2C49.41968407776289%5D%2C%5B8.637657165527344%2C49.3995797187007%5D%2C%5B8.663749694824219%2C49.36851347448498%5D%2C%5B8.686752319335938%2C49.359122687528746%5D%5D%5D%2C%5B%5B%5B8.364715576171875%2C48.96939999849952%5D%2C%5B8.47320556640625%2C48.982019588328214%5D%2C%5B8.485565185546875%2C49.017157315497165%5D%2C%5B8.411407470703125%2C49.05677012268616%5D%2C%5B8.33587646484375%2C49.031565622700356%5D%2C%5B8.320770263671875%2C48.98562459864604%5D%2C%5B8.364715576171875%2C48.96939999849952%5D%5D%5D%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity within Heidelberg or Karlsruhe ( MultiPolygon )" + }, + "response": [] + }, + { + "name": "Contains", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=contains&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "contains" + }, + { + "key": "geometry", + "value": "Point" + }, + { + "key": "coordinates", + "value": "%5B8.684783577919006%2C49.406131991436396%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity that contains NEC Labs Heidelberg (point)" + }, + "response": [] + }, + { + "name": "Overlaps", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": 
"Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=overlaps&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "overlaps" + }, + { + "key": "geometry", + "value": "Polygon" + }, + { + "key": "coordinates", + "value": "%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity that overlaps with NEC Labs building (polygon) in Heidelberg" + }, + "response": [] + }, + { + "name": "Intersects", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=overlaps&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "overlaps" + }, + { + "key": "geometry", + "value": "Polygon" + }, + { + "key": "coordinates", + "value": "%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity that intersects with NEC Labs building (polygon) in Heidelberg\r\n Intersects(g1, g2 ) == Not (Disjoint(g1, g2 ))" + }, + "response": [] + }, + { + "name": "Equals", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=equals&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "equals" + }, + { + "key": "geometry", + "value": "Point" + }, + { + "key": "coordinates", + "value": "%5B8.684783577919006%2C49.406131991436396%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "get every entity equal to NEC Labs Heidelberg point" + }, + 
"response": [] + }, + { + "name": "Disjoint", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/entities?geoproperty=location&georel=disjoint&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "disjoint" + }, + { + "key": "geometry", + "value": "Polygon" + }, + { + "key": "coordinates", + "value": "%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "get entities that do not share any space together with NEC Labs Heidelberg building" + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "CSource", + "item": [ + { + "name": "CSource queries", + "item": [ + { + "name": "Query by type", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "disabled": true + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations?type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations" + ], + "query": [ + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity near NEC Labs Heidelberg, with distance up to 360 meters\r\n NEC Labs Heidelberg coordinates: 8.684783577919006, 49.406131991436396" + }, + "response": [] + }, + { + "name": "Query by type using Link header", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations?type=Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations" + ], + "query": [ + { + "key": "type", + "value": "Vehicle" + } + ] + } + }, + "response": [] + }, + { + "name": "Query by type+id", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations?id=urn:ngsi-ld:Vehicle:C1234&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations" + ], + "query": [ + { + "key": "id", + "value": "urn:ngsi-ld:Vehicle:C1234" + }, + { + 
"key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + } + }, + "response": [] + }, + { + "name": "Query by type+idPattern", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations?idPattern=urn:ngsi-ld:Vehicle:C.*&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations" + ], + "query": [ + { + "key": "idPattern", + "value": "urn:ngsi-ld:Vehicle:C.*" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + } + }, + "response": [] + }, + { + "name": "Geoquery Near min", + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "disabled": true + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations?geoproperty=location&georel=near;minDistance==8000&geometry=Polygon&coordinates=%5B%20%5B%20%5B%208.400421142578125%2C%2049.32333182991094%20%5D%2C%20%5B%208.812408447265625%2C%2049.32333182991094%20%5D%2C%20%5B%208.812408447265625%2C%2049.49489061140408%20%5D%2C%20%5B%208.400421142578125%2C%2049.49489061140408%20%5D%2C%20%5B%208.400421142578125%2C%2049.32333182991094%20%5D%20%5D%20%5D&type=http://example.org/vehicle/Vehicle", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations" + ], + "query": [ + { + "key": "geoproperty", + "value": "location" + }, + { + "key": "georel", + "value": "near;minDistance==8000" + }, + { + "key": "geometry", + "value": "Polygon" + }, + { + "key": "coordinates", + "value": "%5B%20%5B%20%5B%208.400421142578125%2C%2049.32333182991094%20%5D%2C%20%5B%208.812408447265625%2C%2049.32333182991094%20%5D%2C%20%5B%208.812408447265625%2C%2049.49489061140408%20%5D%2C%20%5B%208.400421142578125%2C%2049.49489061140408%20%5D%2C%20%5B%208.400421142578125%2C%2049.32333182991094%20%5D%20%5D%20%5D" + }, + { + "key": "type", + "value": "http://example.org/vehicle/Vehicle" + } + ] + }, + "description": "Get every entity near NEC Labs Heidelberg, with distance up to 360 meters\r\n NEC Labs Heidelberg coordinates: 8.684783577919006, 49.406131991436396" + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "Create csource with Link header", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json" + }, + { + "key": "Accept", + "value": "application/ld+json" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"id\": \"urn:ngsi-ld:ContextSourceRegistration:csr1a3458\",\r\n \"type\": \"ContextSourceRegistration\",\r\n \"name\": \"NameExample\",\r\n \"description\": \"DescriptionExample\",\r\n \"information\": [\r\n {\r\n \"entities\": [\r\n {\r\n \"id\": \"urn:ngsi-ld:Vehicle:A456\",\r\n \"type\": \"Vehicle\"\r\n }\r\n ],\r\n \"properties\": [\r\n \"brandName\",\r\n \"speed\"\r\n ],\r\n \"relationships\": [\r\n \"isParked\"\r\n ]\r\n },\r\n {\r\n \"entities\": [\r\n {\r\n \"idPattern\": \".*downtown$\",\r\n \"type\": \"OffStreetParking\"\r\n 
}\r\n ]\r\n }\r\n ],\r\n \"endpoint\": \"http://my.csource.org:1026\",\r\n \"location\": \"{ \\\"type\\\": \\\"Polygon\\\", \\\"coordinates\\\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }\",\r\n \"timestamp\": {\r\n \"start\": \"2017-11-29T14:53:15\"\r\n },\r\n \"expires\": \"2030-11-29T14:53:15\"\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations", + "" + ] + } + }, + "response": [] + }, + { + "name": "Create csource with context in payload", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"id\": \"urn:ngsi-ld:ContextSourceRegistration:csr1a3458\",\r\n \"type\": \"ContextSourceRegistration\",\r\n \"name\": \"NameExample\",\r\n \"description\": \"DescriptionExample\",\r\n \"information\": [\r\n {\r\n \"entities\": [\r\n {\r\n \"id\": \"urn:ngsi-ld:Vehicle:A456\",\r\n \"type\": \"Vehicle\"\r\n }\r\n ],\r\n \"properties\": [\r\n \"brandName\",\r\n \"speed\"\r\n ],\r\n \"relationships\": [\r\n \"isParked\"\r\n ]\r\n },\r\n {\r\n \"entities\": [\r\n {\r\n \"idPattern\": \".*downtown$\",\r\n \"type\": \"OffStreetParking\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"endpoint\": \"http://my.csource.org:1026\",\r\n \"location\": \"{ \\\"type\\\": \\\"Polygon\\\", \\\"coordinates\\\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }\",\r\n \"timestamp\": {\r\n \"start\": \"2017-11-29T14:53:15\"\r\n },\r\n \"expires\": \"2030-11-29T14:53:15\",\r\n\"@context\": [\r\n\r\n\t\"https://forge.etsi.org/gitlab/NGSI-LD/NGSI-LD/raw/master/coreContext/ngsi-ld-core-context.jsonld\", \r\n {\r\n \"Vehicle\": \"http://example.org/vehicle/Vehicle\",\r\n \"brandName\": \"http://example.org/vehicle/brandName\",\r\n \"brandName1\": \"http://example.org/vehicle/brandName1\",\r\n \"speed\": \"http://example.org/vehicle/speed\",\r\n \"totalSpotNumber\": \"http://example.org/parking/totalSpotNumber\",\r\n \"reliability\": \"http://example.org/common/reliability\",\r\n \"OffStreetParking\": \"http://example.org/parking/OffStreetParking\", \r\n \"availableSpotNumber\": \"http://example.org/parking/availableSpotNumber\",\r\n \"timestamp\": \"http://uri.etsi.org/ngsi-ld/timestamp\",\r\n \"isParked\": {\r\n \"@type\": \"@id\",\r\n \"@id\": \"http://example.org/common/isParked\"\r\n },\r\n \"isNextToBuilding\": { \r\n \"@type\": \"@id\", \r\n \"@id\": \"http://example.org/common/isNextToBuilding\" \r\n }, \r\n \"providedBy\": { \r\n \"@type\": \"@id\", \r\n \"@id\": \"http://example.org/common/providedBy\" \r\n }, \r\n \"name\": \"http://example.org/common/name\" \r\n}\r\n]\r\n}\r\n" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations/", + "host": [ + "{{gatewayServer}}" + 
], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations", + "" + ] + } + }, + "response": [] + }, + { + "name": "update csource", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"id\": \"urn:ngsi-ld:ContextSourceRegistration:csr1a3458\",\r\n \"type\": \"ContextSourceRegistration\",\r\n \"name\": \"NameExample\",\r\n \"description\": \"DescriptionExample\",\r\n \"information\": [\r\n {\r\n \"entities\": [\r\n {\r\n \"id\": \"urn:ngsi-ld:Vehicle:A456\",\r\n \"type\": \"Vehicle\"\r\n }\r\n ],\r\n \"properties\": [\r\n \"brandName\",\r\n \"speed\",\r\n \"brandName1\"\r\n ],\r\n \"relationships\": [\r\n \"isParked\"\r\n ]\r\n },\r\n {\r\n \"entities\": [\r\n {\r\n \"idPattern\": \".*downtown$\",\r\n \"type\": \"OffStreetParking\"\r\n }\r\n ]\r\n }\r\n ],\r\n \"endpoint\": \"http://my.csource.org:1026\",\r\n \"location\": \"{ \\\"type\\\": \\\"Polygon\\\", \\\"coordinates\\\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }\",\r\n \"timestamp\": {\r\n \"start\": \"2017-11-29T14:53:15\"\r\n },\r\n \"expires\": \"2030-11-29T14:53:15\",\r\n\"@context\": [\r\n\r\n \"https://forge.etsi.org/gitlab/NGSI-LD/NGSI-LD/raw/master/coreContext/ngsi-ld-core-context.jsonld\", \r\n {\r\n \"Vehicle\": \"http://example.org/vehicle/Vehicle\",\r\n \"brandName\": \"http://example.org/vehicle/brandName\",\r\n \"brandName1\": \"http://example.org/vehicle/brandName1\",\r\n \"speed\": \"http://example.org/vehicle/speed\",\r\n \"totalSpotNumber\": \"http://example.org/parking/totalSpotNumber\",\r\n \"reliability\": \"http://example.org/common/reliability\",\r\n \"OffStreetParking\": \"http://example.org/parking/OffStreetParking\", \r\n \"availableSpotNumber\": \"http://example.org/parking/availableSpotNumber\", \r\n \"isParked\": {\r\n \"@type\": \"@id\",\r\n \"@id\": \"http://example.org/common/isParked\"\r\n },\r\n \"isNextToBuilding\": { \r\n \"@type\": \"@id\", \r\n \"@id\": \"http://example.org/common/isNextToBuilding\" \r\n }, \r\n \"providedBy\": { \r\n \"@type\": \"@id\", \r\n \"@id\": \"http://example.org/common/providedBy\" \r\n }, \r\n \"name\": \"http://example.org/common/name\",\r\n \"timestamp\": \"http://uri.etsi.org/ngsi-ld/timestamp\",\r\n \"expires\":\"http://uri.etsi.org/ngsi-ld/expires\"\r\n}\r\n]\r\n}\r\n" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations/urn:ngsi-ld:ContextSourceRegistration:csr1a3458", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations", + "urn:ngsi-ld:ContextSourceRegistration:csr1a3458" + ] + }, + "description": "update attribute in csource" + }, + "response": [] + }, + { + "name": "delete csource", + "request": { + "method": "DELETE", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/csourceRegistrations/urn:ngsi-ld:ContextSourceRegistration:csr1a3458", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations", + "urn:ngsi-ld:ContextSourceRegistration:csr1a3458" + ] + } + }, + "response": [] + } + ], + 
"protocolProfileBehavior": {} + }, + { + "name": "Subscription", + "item": [ + { + "name": "Create subscription with link header", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/ld+json", + "type": "text" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Accept", + "value": "application/json", + "type": "text", + "disabled": true + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"type\": \"Subscription\",\r\n\t\"entities\": [{\r\n\t\t\"idPattern\": \".*\",\r\n\t\t\"type\": \"Vehicle\"\r\n\t}],\r\n\t\"watchedAttributes\": [\"brandName\"],\r\n\t\"notification\": {\r\n\t\t\"attributes\": [\"brandName\"],\r\n\t\t\"format\": \"keyValues\",\r\n\t\t\"endpoint\": {\r\n\t\t\t\"uri\": \"http://my.endpoint.org/notify\",\r\n\t\t\t\"accept\": \"application/json\"\r\n\t\t}\r\n\t}\r\n}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/subscriptions/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "" + ] + }, + "description": "create subscription with context in payload" + }, + "response": [] + }, + { + "name": "Query all subscriptions", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/ld+json", + "type": "text" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/subscriptions/", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "" + ] + }, + "description": "for retreiving specific subscription" + }, + "response": [] + }, + { + "name": "Retrieve specific subscription", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/ld+json", + "type": "text" + } + ], + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:71", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "urn:ngsi-ld:Subscription:71" + ] + } + }, + "response": [] + }, + { + "name": "Update subscription", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Content-Type", + "name": "Content-Type", + "value": "application/ld+json", + "type": "text" + }, + { + "key": "Link", + "value": "<{{link}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "\t{\r\n\t \"id\": \"urn:ngsi-ld:Subscription:7\",\r\n\t \"type\": \"Subscription\",\r\n\t \"entities\": [{\r\n\t \"type\": \"Vehicle\"\r\n\t }],\r\n\t \"watchedAttributes\": [\"http://example.org/vehicle/brandName2\"],\r\n\t \"q\":\"http://example.org/vehicle/brandName2!=Mercedes\",\r\n\t \"notification\": {\r\n\t \"attributes\": [\"http://example.org/vehicle/brandName2\"],\r\n\t \"format\": \"keyValues\",\r\n\t \"endpoint\": {\r\n\t \"uri\": \"http://my.endpoint.org/notify\",\r\n\t\r\n\t \"accept\": \"application/json\"\r\n\t }\r\n\t }\r\n\t}" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:7", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "urn:ngsi-ld:Subscription:7" + ] + } + }, + "response": [] + }, + { + "name": "Delete subscription", + "request": { + "method": "DELETE", + "header": [], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "{{gatewayServer}}/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:7", + "host": [ + 
"{{gatewayServer}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "urn:ngsi-ld:Subscription:7" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Microservice status", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "{{RegistryServer}}", + "host": [ + "{{RegistryServer}}" + ] + } + }, + "response": [] + }, + { + "name": "API Gateway", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "{{gatewayServer}}/actuator/health", + "host": [ + "{{gatewayServer}}" + ], + "path": [ + "actuator", + "health" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} +} \ No newline at end of file diff --git a/scorpio-broker/Examples/dummycontext.json b/scorpio-broker/Examples/dummycontext.json new file mode 100644 index 0000000000000000000000000000000000000000..a718805ac503bdf9e3e88ab98b9f423dbd077fc2 --- /dev/null +++ b/scorpio-broker/Examples/dummycontext.json @@ -0,0 +1,243 @@ +{ + + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "id": "@id", + "type": "@type", + "value": "https://uri.etsi.org/ngsi-ld/hasValue", + "object": { + "@id": "https://uri.etsi.org/ngsi-ld/hasObject", + "@type": "@id" + }, + "testedAt": { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@id": "http://example.org/test/P1" + }, + "Property": "https://uri.etsi.org/ngsi-ld/Property", + "Relationship": "https://uri.etsi.org/ngsi-ld/Relationship", + "DateTime": "https://uri.etsi.org/ngsi-ld/DateTime", + "Date": "https://uri.etsi.org/ngsi-ld/Date", + "Time": "https://uri.etsi.org/ngsi-ld/Time", + "createdAt": { + "@id": "https://uri.etsi.org/ngsi-ld/createdAt", + "@type": "DateTime" + }, + "modifiedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/modifiedAt", + "@type": "DateTime" + }, + "observedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/observedAt", + "@type": "DateTime" + }, + "datasetId": { + "@id": "https://uri.etsi.org/ngsi-ld/datasetId", + "@type": "@id" + }, + "instanceId": { + "@id": "https://uri.etsi.org/ngsi-ld/instanceId", + "@type": "@id" + }, + "unitCode": "https://uri.etsi.org/ngsi-ld/unitCode", + "location": "https://uri.etsi.org/ngsi-ld/location", + "observationSpace": "https://uri.etsi.org/ngsi-ld/observationSpace", + "operationSpace": "https://uri.etsi.org/ngsi-ld/operationSpace", + "GeoProperty": "https://uri.etsi.org/ngsi-ld/GeoProperty", + "TemporalProperty": "https://uri.etsi.org/ngsi-ld/TemporalProperty", + "ContextSourceRegistration": "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration", + "Subscription": "https://uri.etsi.org/ngsi-ld/Subscription", + "Notification": "https://uri.etsi.org/ngsi-ld/Notification", + "ContextSourceNotification": "https://uri.etsi.org/ngsi-ld/ContextSourceNotification", + "title": "https://uri.etsi.org/ngsi-ld/title", + "detail": "https://uri.etsi.org/ngsi-ld/detail", + "idPattern": "https://uri.etsi.org/ngsi-ld/idPattern", + "name": "https://uri.etsi.org/ngsi-ld/name", + "description": "https://uri.etsi.org/ngsi-ld/description", + "information": "https://uri.etsi.org/ngsi-ld/information", + "observationInterval": "https://uri.etsi.org/ngsi-ld/observationInterval", + "managementInterval": "https://uri.etsi.org/ngsi-ld/managementInterval", + "expires": { + "@id": "https://uri.etsi.org/ngsi-ld/expires", + "@type": "DateTime" + }, + "endpoint": "https://uri.etsi.org/ngsi-ld/endpoint", + "entities": "https://uri.etsi.org/ngsi-ld/entities", + "properties": { + "@id": "https://uri.etsi.org/ngsi-ld/properties", + "@type": "@vocab" + }, + "relationships": { 
+ "@id": "https://uri.etsi.org/ngsi-ld/relationships", + "@type": "@vocab" + }, + "start": { + "@id": "https://uri.etsi.org/ngsi-ld/start", + "@type": "DateTime" + }, + "end": { + "@id": "https://uri.etsi.org/ngsi-ld/end", + "@type": "DateTime" + }, + "watchedAttributes": { + "@id": "https://uri.etsi.org/ngsi-ld/watchedAttributes", + "@type": "@vocab" + }, + "timeInterval": "https://uri.etsi.org/ngsi-ld/timeInterval", + "q": "https://uri.etsi.org/ngsi-ld/q", + "geoQ": "https://uri.etsi.org/ngsi-ld/geoQ", + "csf": "https://uri.etsi.org/ngsi-ld/csf", + "isActive": "https://uri.etsi.org/ngsi-ld/isActive", + "notification": "https://uri.etsi.org/ngsi-ld/notification", + "status": "https://uri.etsi.org/ngsi-ld/status", + "throttling": "https://uri.etsi.org/ngsi-ld/throttling", + "temporalQ": "https://uri.etsi.org/ngsi-ld/temporalQ", + "geometry": "https://uri.etsi.org/ngsi-ld/geometry", + "coordinates": "https://uri.etsi.org/ngsi-ld/coordinates", + "georel": "https://uri.etsi.org/ngsi-ld/georel", + "geoproperty": "https://uri.etsi.org/ngsi-ld/geoproperty", + "attributes": { + "@id": "https://uri.etsi.org/ngsi-ld/attributes", + "@type": "@vocab" + }, + "format": "https://uri.etsi.org/ngsi-ld/format", + "timesSent": "https://uri.etsi.org/ngsi-ld/timesSent", + "lastNotification": { + "@id": "https://uri.etsi.org/ngsi-ld/lastNotification", + "@type": "DateTime" + }, + "lastFailure": { + "@id": "https://uri.etsi.org/ngsi-ld/lastFailure", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "https://uri.etsi.org/ngsi-ld/lastSuccess", + "@type": "DateTime" + }, + "uri": "https://uri.etsi.org/ngsi-ld/uri", + "accept": "https://uri.etsi.org/ngsi-ld/accept", + "success": { + "@id": "https://uri.etsi.org/ngsi-ld/success", + "@type": "@id" + }, + "errors": "https://uri.etsi.org/ngsi-ld/errors", + "error": "https://uri.etsi.org/ngsi-ld/error", + "entityId": { + "@id": "https://uri.etsi.org/ngsi-ld/entityId", + "@type": "@id" + }, + "updated": "https://uri.etsi.org/ngsi-ld/updated", + "unchanged": "https://uri.etsi.org/ngsi-ld/unchanged", + "attributeName": "https://uri.etsi.org/ngsi-ld/attributeName", + "reason": "https://uri.etsi.org/ngsi-ld/reason", + "timerel": "https://uri.etsi.org/ngsi-ld/timerel", + "time": { + "@id": "https://uri.etsi.org/ngsi-ld/time", + "@type": "DateTime" + }, + "endTime": { + "@id": "https://uri.etsi.org/ngsi-ld/endTime", + "@type": "DateTime" + }, + "timeproperty": "https://uri.etsi.org/ngsi-ld/timeproperty", + "subscriptionId": { + "@id": "https://uri.etsi.org/ngsi-ld/subscriptionId", + "@type": "@id" + }, + "notifiedAt": { + "@id": "https://uri.etsi.org/ngsi-ld/notifiedAt", + "@type": "DateTime" + }, + "data": "https://uri.etsi.org/ngsi-ld/data", + "triggerReason": "https://uri.etsi.org/ngsi-ld/triggerReason", + "values": { + "@id": "https://uri.etsi.org/ngsi-ld/hasValues", + "@container": "@list" + }, + "objects": { + "@id": "https://uri.etsi.org/ngsi-ld/hasObjects", + "@type": "@id", + "@container": "@list" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/", + "schema": "https://schema.org/", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "xsd": "http://www.w3.org/2001/XMLSchema#", + "fiware": "https://uri.fiware.org/ns/data-models#", + "tutorial": "https://fiware.github.io/tutorials.Step-by-Step/schema/", + "Building": "fiware:Building", + "Product": "tutorial:Product", + "Shelf": "tutorial:Shelf", + "StockOrder": "tutorial:StockOrder", + "Person": "schema:Person", + "address": "schema:address", + "category": { + "@id": "fiware:category", + "@type": 
"@vocab" + }, + "commercial": "fiware:commercial", + "office": "fiware:office", + "industrial": "fiware:industrial", + "retail": "fiware:retail", + "residential": "fiware:residential", + "containedInPlace": "fiware:containedInPlace", + "dataProvider": "fiware:dataProvider", + "floorsAboveGround": "fiware:floorsAboveGround", + "floorsBelowGround": "fiware:floorsBelowGround", + "tweets": "tutorial:tweets", + "owner": { + "@id": "fiware:owner", + "@type": "@id" + }, + "occupier": { + "@id": "fiware:occupier", + "@type": "@id" + }, + "refMap": "fiware:refMap", + "source": "fiware:source", + "installedBy": { + "@id": "tutorial:installedBy", + "@type": "@id" + }, + "locatedIn": { + "@id": "tutorial:locatedIn", + "@type": "@id" + }, + "maxCapacity": { + "@id": "tutorial:maxCapacity", + "@type": "xsd:integer" + }, + "statusOfWork": { + "@id": "tutorial:statusOfWork", + "@type": "@vocab" + }, + "requested": "tutorial:requested", + "completed": "tutorial:completed", + "scheduled": "tutorial:scheduled", + "inProgress": "tutorial:inProgress", + + "stocks": "tutorial:stocks", + "currency": "tutorial:currency", + "price": "tutorial:price", + "size": "tutorial:size", + "furniture": { + "@id": "tutorial:furniture", + "@type": "@id" + }, + "requestedFor": { + "@id": "tutorial:requestedFor", + "@type": "@id" + }, + "requestedBy": { + "@id": "tutorial:requestedBy", + "@type": "@id" + }, + "orderedProduct": { + "@id": "tutorial:orderedProduct", + "@type": "@id" + }, + "orderDate": "tutorial:orderDate", + "stockCount": "tutorial:stockCount", + "numberOfItems": "tutorial:numberOfItems" + } + +} diff --git a/scorpio-broker/Examples/fiwaredatamodel.json b/scorpio-broker/Examples/fiwaredatamodel.json new file mode 100644 index 0000000000000000000000000000000000000000..03b2cf3aafdcf45dd3e27781a216e41863403c0e --- /dev/null +++ b/scorpio-broker/Examples/fiwaredatamodel.json @@ -0,0 +1,1532 @@ +{ + "@context": { + "coordinates": { + "@id": "https://purl.org/geojson/vocab#coordinates", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "https://purl.org/geojson/vocab#bbox" + }, + "Point": "https://purl.org/geojson/vocab#Point", + "areaServed": "http://schema.org/areaServed", + "circuit": "https://uri.fiware.org/ns/datamodels/circuit", + "refStreetlightModel": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlightModel" + }, + "refStreetlightControlCabinet": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlightControlCabinet" + }, + "status": "https://uri.fiware.org/ns/datamodels/status", + "powerState": "https://uri.fiware.org/ns/datamodels/powerState", + "refDevice": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refDevice" + }, + "refStreetlightGroup": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlightGroup" + }, + "dateLastLampChange": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastLampChange" + }, + "dateLastSwitchingOn": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastSwitchingOn" + }, + "dateLastSwitchingOff": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastSwitchingOff" + }, + "controllingMethod": "https://uri.fiware.org/ns/datamodels/controllingMethod", + "dateServiceStarted": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateServiceStarted" + }, + 
"image": "https://uri.fiware.org/ns/datamodels/image", + "description": "http://schema.org/description", + "annotations": "https://uri.fiware.org/ns/datamodels/annotations", + "locationCategory": "https://uri.fiware.org/ns/datamodels/locationCategory", + "laternHeight": "https://uri.fiware.org/ns/datamodels/laternHeight", + "illuminanceLevel": "https://uri.fiware.org/ns/datamodels/illuminanceLevel", + "Streetlight": "https://uri.fiware.org/ns/datamodels/Streetlight", + "ok": "https://uri.fiware.org/ns/datamodels/ok", + "defectiveLamp": "https://uri.fiware.org/ns/datamodels/defectiveLamp", + "columnIssue": "https://uri.fiware.org/ns/datamodels/columnIssue", + "brokenLantern": "https://uri.fiware.org/ns/datamodels/brokenLantern", + "on": "https://uri.fiware.org/ns/datamodels/on", + "off": "https://uri.fiware.org/ns/datamodels/off", + "low": "https://uri.fiware.org/ns/datamodels/low", + "bootingUp": "https://uri.fiware.org/ns/datamodels/bootingUp", + "group": "https://uri.fiware.org/ns/datamodels/group", + "individual": "https://uri.fiware.org/ns/datamodels/individual", + "fa\u00e7ade": "https://uri.fiware.org/ns/datamodels/fa\u00e7ade", + "sidewalk": "https://uri.fiware.org/ns/datamodels/sidewalk", + "pedestrianPath": "https://uri.fiware.org/ns/datamodels/pedestrianPath", + "road": "https://uri.fiware.org/ns/datamodels/road", + "playground": "https://uri.fiware.org/ns/datamodels/playground", + "park": "https://uri.fiware.org/ns/datamodels/park", + "garden": "https://uri.fiware.org/ns/datamodels/garden", + "bridge": "https://uri.fiware.org/ns/datamodels/bridge", + "tunnel": "https://uri.fiware.org/ns/datamodels/tunnel", + "parking": "https://uri.fiware.org/ns/datamodels/parking", + "centralIsland": "https://uri.fiware.org/ns/datamodels/centralIsland", + "name": "http://schema.org/name", + "alternateName": "https://uri.fiware.org/ns/datamodels/alternateName", + "maxPowerConsumption": "https://uri.fiware.org/ns/datamodels/maxPowerConsumption", + "minPowerConsumption": "https://uri.fiware.org/ns/datamodels/minPowerConsumption", + "columnBrandName": "https://uri.fiware.org/ns/datamodels/columnBrandName", + "columnModelName": "https://uri.fiware.org/ns/datamodels/columnModelName", + "columnManufacturerName": "https://uri.fiware.org/ns/datamodels/columnManufacturerName", + "columnMadeOf": "https://uri.fiware.org/ns/datamodels/columnMadeOf", + "columnColor": "https://uri.fiware.org/ns/datamodels/columnColor", + "lanternModelName": "https://uri.fiware.org/ns/datamodels/lanternModelName", + "lanternBrandName": "https://uri.fiware.org/ns/datamodels/lanternBrandName", + "lanternManufacturerName": "https://uri.fiware.org/ns/datamodels/lanternManufacturerName", + "lanternWeight": "https://uri.fiware.org/ns/datamodels/lanternWeight", + "lampModelName": "https://uri.fiware.org/ns/datamodels/lampModelName", + "lampBrandName": "https://uri.fiware.org/ns/datamodels/lampBrandName", + "lampManufacturerName": "https://uri.fiware.org/ns/datamodels/lampManufacturerName", + "lampWeight": "https://uri.fiware.org/ns/datamodels/lampWeight", + "workingLife": "https://uri.fiware.org/ns/datamodels/workingLife", + "lampTechnology": "https://uri.fiware.org/ns/datamodels/lampTechnology", + "colorTemperature": "https://uri.fiware.org/ns/datamodels/colorTemperature", + "colorRenderingIndex": "https://uri.fiware.org/ns/datamodels/colorRenderingIndex", + "luminousFlux": "https://uri.fiware.org/ns/datamodels/luminousFlux", + "powerConsumption": "https://uri.fiware.org/ns/datamodels/powerConsumption", + "compliantWith": 
"https://uri.fiware.org/ns/datamodels/compliantWith", + "category": "https://uri.fiware.org/ns/datamodels/category", + "StreetlightModel": "https://uri.fiware.org/ns/datamodels/StreetlightModel", + "steel": "https://uri.fiware.org/ns/datamodels/steel", + "aluminium": "https://uri.fiware.org/ns/datamodels/aluminium", + "wood": "https://uri.fiware.org/ns/datamodels/wood", + "other": "https://uri.fiware.org/ns/datamodels/other", + "LED": "https://uri.fiware.org/ns/datamodels/LED", + "LPS": "https://uri.fiware.org/ns/datamodels/LPS", + "HPS": "https://uri.fiware.org/ns/datamodels/HPS", + "postTop": "https://uri.fiware.org/ns/datamodels/postTop", + "bollard": "https://uri.fiware.org/ns/datamodels/bollard", + "lamppost": "https://uri.fiware.org/ns/datamodels/lamppost", + "lightTower": "https://uri.fiware.org/ns/datamodels/lightTower", + "flashingBeacon": "https://uri.fiware.org/ns/datamodels/flashingBeacon", + "sideEntry": "https://uri.fiware.org/ns/datamodels/sideEntry", + "signLight": "https://uri.fiware.org/ns/datamodels/signLight", + "ornamentalLantern": "https://uri.fiware.org/ns/datamodels/ornamentalLantern", + "switchingOnHours": "https://uri.fiware.org/ns/datamodels/switchingOnHours", + "switchingMode": "https://uri.fiware.org/ns/datamodels/switchingMode", + "activeProgramId": "https://uri.fiware.org/ns/datamodels/activeProgramId", + "refStreetlight": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlight" + }, + "StreetlightGroup": "https://uri.fiware.org/ns/datamodels/StreetlightGroup", + "night-ON": "https://uri.fiware.org/ns/datamodels/night-ON", + "night-OFF": "https://uri.fiware.org/ns/datamodels/night-OFF", + "night-LOW": "https://uri.fiware.org/ns/datamodels/night-LOW", + "always-ON": "https://uri.fiware.org/ns/datamodels/always-ON", + "day-ON": "https://uri.fiware.org/ns/datamodels/day-ON", + "day-OFF": "https://uri.fiware.org/ns/datamodels/day-OFF", + "day-LOW": "https://uri.fiware.org/ns/datamodels/day-LOW", + "serialNumber": "https://uri.fiware.org/ns/datamodels/serialNumber", + "brandName": "https://uri.fiware.org/ns/datamodels/brandName", + "modelName": "https://uri.fiware.org/ns/datamodels/modelName", + "manufacturerName": "https://uri.fiware.org/ns/datamodels/manufacturerName", + "cupboardMadeOf": "https://uri.fiware.org/ns/datamodels/cupboardMadeOf", + "features": "https://uri.fiware.org/ns/datamodels/features", + "dateLastProgramming": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastProgramming" + }, + "nextActuationDeadline": "https://uri.fiware.org/ns/datamodels/nextActuationDeadline", + "responsible": "https://uri.fiware.org/ns/datamodels/responsible", + "workingMode": "https://uri.fiware.org/ns/datamodels/workingMode", + "maximumPowerAvailable": "https://uri.fiware.org/ns/datamodels/maximumPowerAvailable", + "energyConsumed": "https://uri.fiware.org/ns/datamodels/energyConsumed", + "energyCost": "https://uri.fiware.org/ns/datamodels/energyCost", + "reactiveEnergyConsumed": "https://uri.fiware.org/ns/datamodels/reactiveEnergyConsumed", + "dateMeteringStarted": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateMeteringStarted" + }, + "lastMeterReading": "https://uri.fiware.org/ns/datamodels/lastMeterReading", + "meterReadingPeriod": "https://uri.fiware.org/ns/datamodels/meterReadingPeriod", + "frequency": "https://uri.fiware.org/ns/datamodels/frequency", + "totalActivePower": 
"https://uri.fiware.org/ns/datamodels/totalActivePower", + "totalReactivePower": "https://uri.fiware.org/ns/datamodels/totalReactivePower", + "activePower": "https://uri.fiware.org/ns/datamodels/activePower", + "reactivePower": "https://uri.fiware.org/ns/datamodels/reactivePower", + "powerFactor": "https://uri.fiware.org/ns/datamodels/powerFactor", + "cosPhi": "https://uri.fiware.org/ns/datamodels/cosPhi", + "intensity": "https://uri.fiware.org/ns/datamodels/intensity", + "voltage": "https://uri.fiware.org/ns/datamodels/voltage", + "thdrVoltage": "https://uri.fiware.org/ns/datamodels/thdrVoltage", + "thdrIntensity": "https://uri.fiware.org/ns/datamodels/thdrIntensity", + "StreetlightControlCabinet": "https://uri.fiware.org/ns/datamodels/StreetlightControlCabinet", + "plastic": "https://uri.fiware.org/ns/datamodels/plastic", + "metal": "https://uri.fiware.org/ns/datamodels/metal", + "concrete": "https://uri.fiware.org/ns/datamodels/concrete", + "astronomicalClock": "https://uri.fiware.org/ns/datamodels/astronomicalClock", + "individualControl": "https://uri.fiware.org/ns/datamodels/individualControl", + "automatic": "https://uri.fiware.org/ns/datamodels/automatic", + "manual": "https://uri.fiware.org/ns/datamodels/manual", + "semiautomatic": "https://uri.fiware.org/ns/datamodels/semiautomatic", + "contactPoint": "https://uri.fiware.org/ns/datamodels/contactPoint", + "refSeeAlso": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refSeeAlso" + }, + "PointOfInterest": "https://uri.fiware.org/ns/datamodels/PointOfInterest", + "width": "https://uri.fiware.org/ns/datamodels/width", + "length": "https://uri.fiware.org/ns/datamodels/length", + "beachType": "https://uri.fiware.org/ns/datamodels/beachType", + "occupationRate": "https://uri.fiware.org/ns/datamodels/occupationRate", + "facilities": "https://uri.fiware.org/ns/datamodels/facilities", + "accessType": "https://uri.fiware.org/ns/datamodels/accessType", + "Beach": "https://uri.fiware.org/ns/datamodels/Beach", + "whiteSand": "https://uri.fiware.org/ns/datamodels/whiteSand", + "urban": "https://uri.fiware.org/ns/datamodels/urban", + "isolated": "https://uri.fiware.org/ns/datamodels/isolated", + "calmWaters": "https://uri.fiware.org/ns/datamodels/calmWaters", + "blueFlag": "https://uri.fiware.org/ns/datamodels/blueFlag", + "Q-Quality": "https://uri.fiware.org/ns/datamodels/Q-Quality", + "strongWaves": "https://uri.fiware.org/ns/datamodels/strongWaves", + "windy": "https://uri.fiware.org/ns/datamodels/windy", + "blackSand": "https://uri.fiware.org/ns/datamodels/blackSand", + "high": "https://uri.fiware.org/ns/datamodels/high", + "medium": "https://uri.fiware.org/ns/datamodels/medium", + "promenade": "https://uri.fiware.org/ns/datamodels/promenade", + "showers": "https://uri.fiware.org/ns/datamodels/showers", + "cleaningServices": "https://uri.fiware.org/ns/datamodels/cleaningServices", + "lifeGuard": "https://uri.fiware.org/ns/datamodels/lifeGuard", + "sunshadeRental": "https://uri.fiware.org/ns/datamodels/sunshadeRental", + "sunLoungerRental": "https://uri.fiware.org/ns/datamodels/sunLoungerRental", + "waterCraftRental": "https://uri.fiware.org/ns/datamodels/waterCraftRental", + "toilets": "https://uri.fiware.org/ns/datamodels/toilets", + "touristOffice": "https://uri.fiware.org/ns/datamodels/touristOffice", + "litterBins": "https://uri.fiware.org/ns/datamodels/litterBins", + "telephone": "https://uri.fiware.org/ns/datamodels/telephone", + "surfPracticeArea": "https://uri.fiware.org/ns/datamodels/surfPracticeArea", + 
"accessforDisabled": "https://uri.fiware.org/ns/datamodels/accessforDisabled", + "privateVehicle": "https://uri.fiware.org/ns/datamodels/privateVehicle", + "boat": "https://uri.fiware.org/ns/datamodels/boat", + "onFoot": "https://uri.fiware.org/ns/datamodels/onFoot", + "publicTransport": "https://uri.fiware.org/ns/datamodels/publicTransport", + "museumType": "https://uri.fiware.org/ns/datamodels/museumType", + "owner": "https://uri.fiware.org/ns/datamodels/owner", + "historicalPeriod": "https://uri.fiware.org/ns/datamodels/historicalPeriod", + "artPeriod": "https://uri.fiware.org/ns/datamodels/artPeriod", + "buildingType": "https://uri.fiware.org/ns/datamodels/buildingType", + "featuredArtist": "https://uri.fiware.org/ns/datamodels/featuredArtist", + "touristArea": "https://uri.fiware.org/ns/datamodels/touristArea", + "openingHoursSpecification": "https://uri.fiware.org/ns/datamodels/openingHoursSpecification", + "Museum": "https://uri.fiware.org/ns/datamodels/Museum", + "appliedArts": "https://uri.fiware.org/ns/datamodels/appliedArts", + "scienceAndTechnology": "https://uri.fiware.org/ns/datamodels/scienceAndTechnology", + "fineArts": "https://uri.fiware.org/ns/datamodels/fineArts", + "music": "https://uri.fiware.org/ns/datamodels/music", + "history": "https://uri.fiware.org/ns/datamodels/history", + "sacredArt": "https://uri.fiware.org/ns/datamodels/sacredArt", + "archaeology": "https://uri.fiware.org/ns/datamodels/archaeology", + "specials": "https://uri.fiware.org/ns/datamodels/specials", + "decorativeArts": "https://uri.fiware.org/ns/datamodels/decorativeArts", + "literature": "https://uri.fiware.org/ns/datamodels/literature", + "medicineAndPharmacy": "https://uri.fiware.org/ns/datamodels/medicineAndPharmacy", + "maritime": "https://uri.fiware.org/ns/datamodels/maritime", + "transports": "https://uri.fiware.org/ns/datamodels/transports", + "military": "https://uri.fiware.org/ns/datamodels/military", + "wax": "https://uri.fiware.org/ns/datamodels/wax", + "popularArtsAndTraditions": "https://uri.fiware.org/ns/datamodels/popularArtsAndTraditions", + "numismatic": "https://uri.fiware.org/ns/datamodels/numismatic", + "unesco": "https://uri.fiware.org/ns/datamodels/unesco", + "ceramics": "https://uri.fiware.org/ns/datamodels/ceramics", + "sumptuaryArts": "https://uri.fiware.org/ns/datamodels/sumptuaryArts", + "naturalScience": "https://uri.fiware.org/ns/datamodels/naturalScience", + "prehistoric": "https://uri.fiware.org/ns/datamodels/prehistoric", + "ethnology": "https://uri.fiware.org/ns/datamodels/ethnology", + "railway": "https://uri.fiware.org/ns/datamodels/railway", + "mining": "https://uri.fiware.org/ns/datamodels/mining", + "textile": "https://uri.fiware.org/ns/datamodels/textile", + "sculpture": "https://uri.fiware.org/ns/datamodels/sculpture", + "multiDisciplinar": "https://uri.fiware.org/ns/datamodels/multiDisciplinar", + "painting": "https://uri.fiware.org/ns/datamodels/painting", + "paleonthology": "https://uri.fiware.org/ns/datamodels/paleonthology", + "modernArt": "https://uri.fiware.org/ns/datamodels/modernArt", + "thematic": "https://uri.fiware.org/ns/datamodels/thematic", + "architecture": "https://uri.fiware.org/ns/datamodels/architecture", + "museumHouse": "https://uri.fiware.org/ns/datamodels/museumHouse", + "cathedralMuseum": "https://uri.fiware.org/ns/datamodels/cathedralMuseum", + "diocesanMuseum": "https://uri.fiware.org/ns/datamodels/diocesanMuseum", + "universitary": "https://uri.fiware.org/ns/datamodels/universitary", + "contemporaryArt": 
"https://uri.fiware.org/ns/datamodels/contemporaryArt", + "bullfighting": "https://uri.fiware.org/ns/datamodels/bullfighting", + "elevator": "https://uri.fiware.org/ns/datamodels/elevator", + "cafeteria": "https://uri.fiware.org/ns/datamodels/cafeteria", + "shop": "https://uri.fiware.org/ns/datamodels/shop", + "auditory": "https://uri.fiware.org/ns/datamodels/auditory", + "conferenceRoom": "https://uri.fiware.org/ns/datamodels/conferenceRoom", + "audioguide": "https://uri.fiware.org/ns/datamodels/audioguide", + "cloakRoom": "https://uri.fiware.org/ns/datamodels/cloakRoom", + "forDisabled": "https://uri.fiware.org/ns/datamodels/forDisabled", + "forBabies": "https://uri.fiware.org/ns/datamodels/forBabies", + "guidedTour": "https://uri.fiware.org/ns/datamodels/guidedTour", + "restaurant": "https://uri.fiware.org/ns/datamodels/restaurant", + "ramp": "https://uri.fiware.org/ns/datamodels/ramp", + "reservation": "https://uri.fiware.org/ns/datamodels/reservation", + "prehistoricPlace": "https://uri.fiware.org/ns/datamodels/prehistoricPlace", + "acropolis": "https://uri.fiware.org/ns/datamodels/acropolis", + "alcazaba": "https://uri.fiware.org/ns/datamodels/alcazaba", + "aqueduct": "https://uri.fiware.org/ns/datamodels/aqueduct", + "alcazar": "https://uri.fiware.org/ns/datamodels/alcazar", + "amphitheatre": "https://uri.fiware.org/ns/datamodels/amphitheatre", + "arch": "https://uri.fiware.org/ns/datamodels/arch", + "polularArchitecture": "https://uri.fiware.org/ns/datamodels/polularArchitecture", + "basilica": "https://uri.fiware.org/ns/datamodels/basilica", + "chapel": "https://uri.fiware.org/ns/datamodels/chapel", + "cartuja": "https://uri.fiware.org/ns/datamodels/cartuja", + "nobleHouse": "https://uri.fiware.org/ns/datamodels/nobleHouse", + "residence": "https://uri.fiware.org/ns/datamodels/residence", + "castle": "https://uri.fiware.org/ns/datamodels/castle", + "castro": "https://uri.fiware.org/ns/datamodels/castro", + "catacombs": "https://uri.fiware.org/ns/datamodels/catacombs", + "cathedral": "https://uri.fiware.org/ns/datamodels/cathedral", + "cloister": "https://uri.fiware.org/ns/datamodels/cloister", + "convent": "https://uri.fiware.org/ns/datamodels/convent", + "prehistoricCave": "https://uri.fiware.org/ns/datamodels/prehistoricCave", + "dolmen": "https://uri.fiware.org/ns/datamodels/dolmen", + "officeBuilding": "https://uri.fiware.org/ns/datamodels/officeBuilding", + "houseBuilding": "https://uri.fiware.org/ns/datamodels/houseBuilding", + "industrialBuilding": "https://uri.fiware.org/ns/datamodels/industrialBuilding", + "militaryBuilding": "https://uri.fiware.org/ns/datamodels/militaryBuilding", + "hermitage": "https://uri.fiware.org/ns/datamodels/hermitage", + "fortress": "https://uri.fiware.org/ns/datamodels/fortress", + "sculpturalGroups": "https://uri.fiware.org/ns/datamodels/sculpturalGroups", + "church": "https://uri.fiware.org/ns/datamodels/church", + "fishMarket": "https://uri.fiware.org/ns/datamodels/fishMarket", + "masia": "https://uri.fiware.org/ns/datamodels/masia", + "masiaFortificada": "https://uri.fiware.org/ns/datamodels/masiaFortificada", + "minaret": "https://uri.fiware.org/ns/datamodels/minaret", + "monastery": "https://uri.fiware.org/ns/datamodels/monastery", + "monolith": "https://uri.fiware.org/ns/datamodels/monolith", + "walls": "https://uri.fiware.org/ns/datamodels/walls", + "necropolis": "https://uri.fiware.org/ns/datamodels/necropolis", + "menhir": "https://uri.fiware.org/ns/datamodels/menhir", + "mansion": "https://uri.fiware.org/ns/datamodels/mansion", + 
"palace": "https://uri.fiware.org/ns/datamodels/palace", + "pantheon": "https://uri.fiware.org/ns/datamodels/pantheon", + "pazo": "https://uri.fiware.org/ns/datamodels/pazo", + "pyramid": "https://uri.fiware.org/ns/datamodels/pyramid", + "gate": "https://uri.fiware.org/ns/datamodels/gate", + "arcade": "https://uri.fiware.org/ns/datamodels/arcade", + "walledArea": "https://uri.fiware.org/ns/datamodels/walledArea", + "sanctuary": "https://uri.fiware.org/ns/datamodels/sanctuary", + "grave": "https://uri.fiware.org/ns/datamodels/grave", + "synagogue": "https://uri.fiware.org/ns/datamodels/synagogue", + "taulasTalayotsNavetas": "https://uri.fiware.org/ns/datamodels/taulasTalayotsNavetas", + "theathre": "https://uri.fiware.org/ns/datamodels/theathre", + "temple": "https://uri.fiware.org/ns/datamodels/temple", + "spring": "https://uri.fiware.org/ns/datamodels/spring", + "tower": "https://uri.fiware.org/ns/datamodels/tower", + "archeologicalSite": "https://uri.fiware.org/ns/datamodels/archeologicalSite", + "university": "https://uri.fiware.org/ns/datamodels/university", + "graveyard": "https://uri.fiware.org/ns/datamodels/graveyard", + "fortifiedTemple": "https://uri.fiware.org/ns/datamodels/fortifiedTemple", + "civilEngineering": "https://uri.fiware.org/ns/datamodels/civilEngineering", + "square": "https://uri.fiware.org/ns/datamodels/square", + "seminar": "https://uri.fiware.org/ns/datamodels/seminar", + "bullfightingRing": "https://uri.fiware.org/ns/datamodels/bullfightingRing", + "publicBuilding": "https://uri.fiware.org/ns/datamodels/publicBuilding", + "town": "https://uri.fiware.org/ns/datamodels/town", + "cavesAndTouristicMines": "https://uri.fiware.org/ns/datamodels/cavesAndTouristicMines", + "proCathedral": "https://uri.fiware.org/ns/datamodels/proCathedral", + "mosque": "https://uri.fiware.org/ns/datamodels/mosque", + "circus": "https://uri.fiware.org/ns/datamodels/circus", + "burialMound": "https://uri.fiware.org/ns/datamodels/burialMound", + "Open311ServiceType": "https://uri.fiware.org/ns/datamodels/Open311ServiceType", + "Open311ServiceRequest": "https://uri.fiware.org/ns/datamodels/Open311ServiceRequest", + "containedInPlace": "https://uri.fiware.org/ns/datamodels/containedInPlace", + "occupier": "https://uri.fiware.org/ns/datamodels/occupier", + "floorsAboveGround": "https://uri.fiware.org/ns/datamodels/floorsAboveGround", + "floorsBelowGround": "https://uri.fiware.org/ns/datamodels/floorsBelowGround", + "refMap": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refMap" + }, + "openingHours": "http://schema.org/openingHours", + "Building": "https://uri.fiware.org/ns/datamodels/Building", + "apartments": "https://uri.fiware.org/ns/datamodels/apartments", + "farm": "https://uri.fiware.org/ns/datamodels/farm", + "hotel": "https://uri.fiware.org/ns/datamodels/hotel", + "house": "https://uri.fiware.org/ns/datamodels/house", + "detached": "https://uri.fiware.org/ns/datamodels/detached", + "residential": "https://uri.fiware.org/ns/datamodels/residential", + "dormitory": "https://uri.fiware.org/ns/datamodels/dormitory", + "terrace": "https://uri.fiware.org/ns/datamodels/terrace", + "houseboat": "https://uri.fiware.org/ns/datamodels/houseboat", + "bungalow": "https://uri.fiware.org/ns/datamodels/bungalow", + "static_caravan": "https://uri.fiware.org/ns/datamodels/static_caravan", + "commercial": "https://uri.fiware.org/ns/datamodels/commercial", + "office": "https://uri.fiware.org/ns/datamodels/office", + "industrial": "https://uri.fiware.org/ns/datamodels/industrial", + 
"retail": "https://uri.fiware.org/ns/datamodels/retail", + "warehouse": "https://uri.fiware.org/ns/datamodels/warehouse", + "kiosk": "https://uri.fiware.org/ns/datamodels/kiosk", + "bakehouse": "https://uri.fiware.org/ns/datamodels/bakehouse", + "kindergarten": "https://uri.fiware.org/ns/datamodels/kindergarten", + "shrine": "https://uri.fiware.org/ns/datamodels/shrine", + "civic": "https://uri.fiware.org/ns/datamodels/civic", + "hospital": "https://uri.fiware.org/ns/datamodels/hospital", + "school": "https://uri.fiware.org/ns/datamodels/school", + "stadium": "https://uri.fiware.org/ns/datamodels/stadium", + "train_station": "https://uri.fiware.org/ns/datamodels/train_station", + "transportation": "https://uri.fiware.org/ns/datamodels/transportation", + "grandstand": "https://uri.fiware.org/ns/datamodels/grandstand", + "public": "https://uri.fiware.org/ns/datamodels/public", + "barn": "https://uri.fiware.org/ns/datamodels/barn", + "bunker": "https://uri.fiware.org/ns/datamodels/bunker", + "cabin": "https://uri.fiware.org/ns/datamodels/cabin", + "carport": "https://uri.fiware.org/ns/datamodels/carport", + "conservatory": "https://uri.fiware.org/ns/datamodels/conservatory", + "construction": "https://uri.fiware.org/ns/datamodels/construction", + "cowshed": "https://uri.fiware.org/ns/datamodels/cowshed", + "digester": "https://uri.fiware.org/ns/datamodels/digester", + "farm_auxiliary": "https://uri.fiware.org/ns/datamodels/farm_auxiliary", + "garage": "https://uri.fiware.org/ns/datamodels/garage", + "garages": "https://uri.fiware.org/ns/datamodels/garages", + "garbage_shed": "https://uri.fiware.org/ns/datamodels/garbage_shed", + "greenhouse": "https://uri.fiware.org/ns/datamodels/greenhouse", + "hangar": "https://uri.fiware.org/ns/datamodels/hangar", + "hut": "https://uri.fiware.org/ns/datamodels/hut", + "pavilion": "https://uri.fiware.org/ns/datamodels/pavilion", + "riding_hall": "https://uri.fiware.org/ns/datamodels/riding_hall", + "roof": "https://uri.fiware.org/ns/datamodels/roof", + "shed": "https://uri.fiware.org/ns/datamodels/shed", + "stable": "https://uri.fiware.org/ns/datamodels/stable", + "sty": "https://uri.fiware.org/ns/datamodels/sty", + "transformer_tower": "https://uri.fiware.org/ns/datamodels/transformer_tower", + "service": "https://uri.fiware.org/ns/datamodels/service", + "ruins": "https://uri.fiware.org/ns/datamodels/ruins", + "water_tower": "https://uri.fiware.org/ns/datamodels/water_tower", + "refBuilding": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refBuilding" + }, + "refOperator": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refOperator" + }, + "operationType": "https://uri.fiware.org/ns/datamodels/operationType", + "result": "https://uri.fiware.org/ns/datamodels/result", + "startDate": "https://uri.fiware.org/ns/datamodels/startDate", + "endDate": "https://uri.fiware.org/ns/datamodels/endDate", + "dateStarted": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateStarted" + }, + "dateFinished": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateFinished" + }, + "operationSequence": "https://uri.fiware.org/ns/datamodels/operationSequence", + "refRelatedBuildingOperation": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refRelatedBuildingOperation" + }, + "refRelatedDeviceOperation": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refRelatedDeviceOperation" + }, + "BuildingOperation": 
"https://uri.fiware.org/ns/datamodels/BuildingOperation", + "aborted": "https://uri.fiware.org/ns/datamodels/aborted", + "planned": "https://uri.fiware.org/ns/datamodels/planned", + "ongoing": "https://uri.fiware.org/ns/datamodels/ongoing", + "finished": "https://uri.fiware.org/ns/datamodels/finished", + "scheduled": "https://uri.fiware.org/ns/datamodels/scheduled", + "cancelled": "https://uri.fiware.org/ns/datamodels/cancelled", + "OnStreetParking": "https://uri.fiware.org/ns/datamodels/OnStreetParking", + "ParkingGroup": "https://uri.fiware.org/ns/datamodels/ParkingGroup", + "refParkingGroup": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refParkingGroup" + }, + "refParkingSite": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refParkingSite" + }, + "TimeInstant": "https://uri.fiware.org/ns/datamodels/TimeInstant", + "ParkingSpot": "https://uri.fiware.org/ns/datamodels/ParkingSpot", + "occupied": "https://uri.fiware.org/ns/datamodels/occupied", + "free": "https://uri.fiware.org/ns/datamodels/free", + "closed": "https://uri.fiware.org/ns/datamodels/closed", + "unknown": "https://uri.fiware.org/ns/datamodels/unknown", + "onstreet": "https://uri.fiware.org/ns/datamodels/onstreet", + "offstreet": "https://uri.fiware.org/ns/datamodels/offstreet", + "ParkingAccess": "https://uri.fiware.org/ns/datamodels/ParkingAccess", + "extCategory": "https://uri.fiware.org/ns/datamodels/extCategory", + "allowedVehicleType": "https://uri.fiware.org/ns/datamodels/allowedVehicleType", + "chargeType": "https://uri.fiware.org/ns/datamodels/chargeType", + "requiredPermit": "https://uri.fiware.org/ns/datamodels/requiredPermit", + "occupancyDetectionType": "https://uri.fiware.org/ns/datamodels/occupancyDetectionType", + "acceptedPaymentMethod": "https://uri.fiware.org/ns/datamodels/acceptedPaymentMethod", + "priceRatePerMinute": "https://uri.fiware.org/ns/datamodels/priceRatePerMinute", + "priceCurrency": "https://uri.fiware.org/ns/datamodels/priceCurrency", + "layout": "https://uri.fiware.org/ns/datamodels/layout", + "usageScenario": "https://uri.fiware.org/ns/datamodels/usageScenario", + "parkingMode": "https://uri.fiware.org/ns/datamodels/parkingMode", + "security": "https://uri.fiware.org/ns/datamodels/security", + "highestFloor": "https://uri.fiware.org/ns/datamodels/highestFloor", + "lowestFloor": "https://uri.fiware.org/ns/datamodels/lowestFloor", + "maximumAllowedDuration": "https://uri.fiware.org/ns/datamodels/maximumAllowedDuration", + "totalSpotNumber": "https://uri.fiware.org/ns/datamodels/totalSpotNumber", + "availableSpotNumber": "https://uri.fiware.org/ns/datamodels/availableSpotNumber", + "extraSpotNumber": "https://uri.fiware.org/ns/datamodels/extraSpotNumber", + "firstAvailableFloor": "https://uri.fiware.org/ns/datamodels/firstAvailableFloor", + "specialLocation": "https://uri.fiware.org/ns/datamodels/specialLocation", + "reservationType": "https://uri.fiware.org/ns/datamodels/reservationType", + "provider": "https://uri.fiware.org/ns/datamodels/provider", + "measuresPeriod": "https://uri.fiware.org/ns/datamodels/measuresPeriod", + "measuresPeriodUnit": "https://uri.fiware.org/ns/datamodels/measuresPeriodUnit", + "averageSpotWidth": "https://uri.fiware.org/ns/datamodels/averageSpotWidth", + "averageSpotLength": "https://uri.fiware.org/ns/datamodels/averageSpotLength", + "maximumAllowedHeight": "https://uri.fiware.org/ns/datamodels/maximumAllowedHeight", + "maximumAllowedWidth": "https://uri.fiware.org/ns/datamodels/maximumAllowedWidth", + "refParkingAccess": 
{ + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refParkingAccess" + }, + "refParkingSpot": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refParkingSpot" + }, + "aggregateRating": "https://uri.fiware.org/ns/datamodels/aggregateRating", + "OffStreetParking": "https://uri.fiware.org/ns/datamodels/OffStreetParking", + "private": "https://uri.fiware.org/ns/datamodels/private", + "publicPrivate": "https://uri.fiware.org/ns/datamodels/publicPrivate", + "urbanDeterrentParking": "https://uri.fiware.org/ns/datamodels/urbanDeterrentParking", + "parkingGarage": "https://uri.fiware.org/ns/datamodels/parkingGarage", + "parkingLot": "https://uri.fiware.org/ns/datamodels/parkingLot", + "shortTerm": "https://uri.fiware.org/ns/datamodels/shortTerm", + "mediumTerm": "https://uri.fiware.org/ns/datamodels/mediumTerm", + "longTerm": "https://uri.fiware.org/ns/datamodels/longTerm", + "feeCharged": "https://uri.fiware.org/ns/datamodels/feeCharged", + "staffed": "https://uri.fiware.org/ns/datamodels/staffed", + "guarded": "https://uri.fiware.org/ns/datamodels/guarded", + "barrierAccess": "https://uri.fiware.org/ns/datamodels/barrierAccess", + "gateAccess": "https://uri.fiware.org/ns/datamodels/gateAccess", + "freeAccess": "https://uri.fiware.org/ns/datamodels/freeAccess", + "onlyResidents": "https://uri.fiware.org/ns/datamodels/onlyResidents", + "onlyWithPermit": "https://uri.fiware.org/ns/datamodels/onlyWithPermit", + "forEmployees": "https://uri.fiware.org/ns/datamodels/forEmployees", + "forVisitors": "https://uri.fiware.org/ns/datamodels/forVisitors", + "forCustomers": "https://uri.fiware.org/ns/datamodels/forCustomers", + "forStudents": "https://uri.fiware.org/ns/datamodels/forStudents", + "forMembers": "https://uri.fiware.org/ns/datamodels/forMembers", + "forResidents": "https://uri.fiware.org/ns/datamodels/forResidents", + "forElectricalCharging": "https://uri.fiware.org/ns/datamodels/forElectricalCharging", + "underground": "https://uri.fiware.org/ns/datamodels/underground", + "ground": "https://uri.fiware.org/ns/datamodels/ground", + "agriculturalVehicle": "https://uri.fiware.org/ns/datamodels/agriculturalVehicle", + "bicycle": "https://uri.fiware.org/ns/datamodels/bicycle", + "bus": "https://uri.fiware.org/ns/datamodels/bus", + "car": "https://uri.fiware.org/ns/datamodels/car", + "caravan": "https://uri.fiware.org/ns/datamodels/caravan", + "carWithCaravan": "https://uri.fiware.org/ns/datamodels/carWithCaravan", + "carWithTrailer": "https://uri.fiware.org/ns/datamodels/carWithTrailer", + "constructionOrMaintenanceVehicle": "https://uri.fiware.org/ns/datamodels/constructionOrMaintenanceVehicle", + "lorry": "https://uri.fiware.org/ns/datamodels/lorry", + "moped": "https://uri.fiware.org/ns/datamodels/moped", + "motorcycle": "https://uri.fiware.org/ns/datamodels/motorcycle", + "motorcycleWithSideCar": "https://uri.fiware.org/ns/datamodels/motorcycleWithSideCar", + "motorscooter": "https://uri.fiware.org/ns/datamodels/motorscooter", + "tanker": "https://uri.fiware.org/ns/datamodels/tanker", + "trailer": "https://uri.fiware.org/ns/datamodels/trailer", + "van": "https://uri.fiware.org/ns/datamodels/van", + "anyVehicle": "https://uri.fiware.org/ns/datamodels/anyVehicle", + "flat": "https://uri.fiware.org/ns/datamodels/flat", + "minimum": "https://uri.fiware.org/ns/datamodels/minimum", + "maximum": "https://uri.fiware.org/ns/datamodels/maximum", + "additionalIntervalPrice": "https://uri.fiware.org/ns/datamodels/additionalIntervalPrice", + "seasonTicket": 
"https://uri.fiware.org/ns/datamodels/seasonTicket", + "temporaryPrice": "https://uri.fiware.org/ns/datamodels/temporaryPrice", + "firstIntervalPrice": "https://uri.fiware.org/ns/datamodels/firstIntervalPrice", + "annualPayment": "https://uri.fiware.org/ns/datamodels/annualPayment", + "monthlyPayment": "https://uri.fiware.org/ns/datamodels/monthlyPayment", + "employeePermit": "https://uri.fiware.org/ns/datamodels/employeePermit", + "studentPermit": "https://uri.fiware.org/ns/datamodels/studentPermit", + "fairPermit": "https://uri.fiware.org/ns/datamodels/fairPermit", + "governmentPermit": "https://uri.fiware.org/ns/datamodels/governmentPermit", + "residentPermit": "https://uri.fiware.org/ns/datamodels/residentPermit", + "specificIdentifiedVehiclePermit": "https://uri.fiware.org/ns/datamodels/specificIdentifiedVehiclePermit", + "visitorPermit": "https://uri.fiware.org/ns/datamodels/visitorPermit", + "noPermitNeeded": "https://uri.fiware.org/ns/datamodels/noPermitNeeded", + "none": "https://uri.fiware.org/ns/datamodels/none", + "balancing": "https://uri.fiware.org/ns/datamodels/balancing", + "singleSpaceDetection": "https://uri.fiware.org/ns/datamodels/singleSpaceDetection", + "modelBased": "https://uri.fiware.org/ns/datamodels/modelBased", + "ByBankTransferInAdvance": "https://uri.fiware.org/ns/datamodels/ByBankTransferInAdvance", + "ByInvoice": "https://uri.fiware.org/ns/datamodels/ByInvoice", + "Cash": "https://uri.fiware.org/ns/datamodels/Cash", + "CheckInAdvance": "https://uri.fiware.org/ns/datamodels/CheckInAdvance", + "COD": "https://uri.fiware.org/ns/datamodels/COD", + "DirectDebit": "https://uri.fiware.org/ns/datamodels/DirectDebit", + "GoogleCheckout": "https://uri.fiware.org/ns/datamodels/GoogleCheckout", + "PayPal": "https://uri.fiware.org/ns/datamodels/PayPal", + "PaySwarm": "https://uri.fiware.org/ns/datamodels/PaySwarm", + "automatedParkingGarage": "https://uri.fiware.org/ns/datamodels/automatedParkingGarage", + "surface": "https://uri.fiware.org/ns/datamodels/surface", + "multiStorey": "https://uri.fiware.org/ns/datamodels/multiStorey", + "singleLevel": "https://uri.fiware.org/ns/datamodels/singleLevel", + "multiLevel": "https://uri.fiware.org/ns/datamodels/multiLevel", + "openSpace": "https://uri.fiware.org/ns/datamodels/openSpace", + "covered": "https://uri.fiware.org/ns/datamodels/covered", + "nested": "https://uri.fiware.org/ns/datamodels/nested", + "field": "https://uri.fiware.org/ns/datamodels/field", + "rooftop": "https://uri.fiware.org/ns/datamodels/rooftop", + "sheds": "https://uri.fiware.org/ns/datamodels/sheds", + "carports": "https://uri.fiware.org/ns/datamodels/carports", + "garageBoxes": "https://uri.fiware.org/ns/datamodels/garageBoxes", + "truckParking": "https://uri.fiware.org/ns/datamodels/truckParking", + "parkAndRide": "https://uri.fiware.org/ns/datamodels/parkAndRide", + "parkAndCycle": "https://uri.fiware.org/ns/datamodels/parkAndCycle", + "parkAndWalk": "https://uri.fiware.org/ns/datamodels/parkAndWalk", + "kissAndRide": "https://uri.fiware.org/ns/datamodels/kissAndRide", + "liftshare": "https://uri.fiware.org/ns/datamodels/liftshare", + "carSharing": "https://uri.fiware.org/ns/datamodels/carSharing", + "restArea": "https://uri.fiware.org/ns/datamodels/restArea", + "serviceArea": "https://uri.fiware.org/ns/datamodels/serviceArea", + "dropOffWithValet": "https://uri.fiware.org/ns/datamodels/dropOffWithValet", + "dropOffMechanical": "https://uri.fiware.org/ns/datamodels/dropOffMechanical", + "eventParking": 
"https://uri.fiware.org/ns/datamodels/eventParking", + "automaticParkingGuidance": "https://uri.fiware.org/ns/datamodels/automaticParkingGuidance", + "staffGuidesToSpace": "https://uri.fiware.org/ns/datamodels/staffGuidesToSpace", + "vehicleLift": "https://uri.fiware.org/ns/datamodels/vehicleLift", + "loadingBay": "https://uri.fiware.org/ns/datamodels/loadingBay", + "dropOff": "https://uri.fiware.org/ns/datamodels/dropOff", + "overnightParking": "https://uri.fiware.org/ns/datamodels/overnightParking", + "perpendicularParking": "https://uri.fiware.org/ns/datamodels/perpendicularParking", + "parallelParking": "https://uri.fiware.org/ns/datamodels/parallelParking", + "echelonParking": "https://uri.fiware.org/ns/datamodels/echelonParking", + "toilet": "https://uri.fiware.org/ns/datamodels/toilet", + "shower": "https://uri.fiware.org/ns/datamodels/shower", + "informationPoint": "https://uri.fiware.org/ns/datamodels/informationPoint", + "internetWireless": "https://uri.fiware.org/ns/datamodels/internetWireless", + "payDesk": "https://uri.fiware.org/ns/datamodels/payDesk", + "paymentMachine": "https://uri.fiware.org/ns/datamodels/paymentMachine", + "cashMachine": "https://uri.fiware.org/ns/datamodels/cashMachine", + "vendingMachine": "https://uri.fiware.org/ns/datamodels/vendingMachine", + "faxMachineOrService": "https://uri.fiware.org/ns/datamodels/faxMachineOrService", + "copyMachineOrService": "https://uri.fiware.org/ns/datamodels/copyMachineOrService", + "safeDeposit": "https://uri.fiware.org/ns/datamodels/safeDeposit", + "luggageLocker": "https://uri.fiware.org/ns/datamodels/luggageLocker", + "publicPhone": "https://uri.fiware.org/ns/datamodels/publicPhone", + "dumpingStation": "https://uri.fiware.org/ns/datamodels/dumpingStation", + "freshWater": "https://uri.fiware.org/ns/datamodels/freshWater", + "wasteDisposal": "https://uri.fiware.org/ns/datamodels/wasteDisposal", + "refuseBin": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refuseBin" + }, + "iceFreeScaffold": "https://uri.fiware.org/ns/datamodels/iceFreeScaffold", + "electricChargingStation": "https://uri.fiware.org/ns/datamodels/electricChargingStation", + "bikeParking": "https://uri.fiware.org/ns/datamodels/bikeParking", + "tollTerminal": "https://uri.fiware.org/ns/datamodels/tollTerminal", + "defibrillator": "https://uri.fiware.org/ns/datamodels/defibrillator", + "firstAidEquipment": "https://uri.fiware.org/ns/datamodels/firstAidEquipment", + "fireHose": "https://uri.fiware.org/ns/datamodels/fireHose", + "fireExtinguisher": "https://uri.fiware.org/ns/datamodels/fireExtinguisher", + "fireHydrant": "https://uri.fiware.org/ns/datamodels/fireHydrant", + "patrolled": "https://uri.fiware.org/ns/datamodels/patrolled", + "securityStaff": "https://uri.fiware.org/ns/datamodels/securityStaff", + "externalSecurity": "https://uri.fiware.org/ns/datamodels/externalSecurity", + "cctv": "https://uri.fiware.org/ns/datamodels/cctv", + "dog": "https://uri.fiware.org/ns/datamodels/dog", + "guard24hours": "https://uri.fiware.org/ns/datamodels/guard24hours", + "lighting": "https://uri.fiware.org/ns/datamodels/lighting", + "floodLight": "https://uri.fiware.org/ns/datamodels/floodLight", + "fences": "https://uri.fiware.org/ns/datamodels/fences", + "areaSeperatedFromSurroundings": "https://uri.fiware.org/ns/datamodels/areaSeperatedFromSurroundings", + "airportTerminal": "https://uri.fiware.org/ns/datamodels/airportTerminal", + "exhibitonCentre": "https://uri.fiware.org/ns/datamodels/exhibitonCentre", + "shoppingCentre": 
"https://uri.fiware.org/ns/datamodels/shoppingCentre", + "specificFacility": "https://uri.fiware.org/ns/datamodels/specificFacility", + "trainStation": "https://uri.fiware.org/ns/datamodels/trainStation", + "campground": "https://uri.fiware.org/ns/datamodels/campground", + "themePark": "https://uri.fiware.org/ns/datamodels/themePark", + "ferryTerminal": "https://uri.fiware.org/ns/datamodels/ferryTerminal", + "vehicleOnRailTerminal": "https://uri.fiware.org/ns/datamodels/vehicleOnRailTerminal", + "coachStation": "https://uri.fiware.org/ns/datamodels/coachStation", + "cableCarStation": "https://uri.fiware.org/ns/datamodels/cableCarStation", + "publicTransportStation": "https://uri.fiware.org/ns/datamodels/publicTransportStation", + "market": "https://uri.fiware.org/ns/datamodels/market", + "religiousCentre": "https://uri.fiware.org/ns/datamodels/religiousCentre", + "conventionCentre": "https://uri.fiware.org/ns/datamodels/conventionCentre", + "cinema": "https://uri.fiware.org/ns/datamodels/cinema", + "skilift": "https://uri.fiware.org/ns/datamodels/skilift", + "open": "https://uri.fiware.org/ns/datamodels/open", + "closedAbnormal": "https://uri.fiware.org/ns/datamodels/closedAbnormal", + "openingTimesInForce": "https://uri.fiware.org/ns/datamodels/openingTimesInForce", + "full": "https://uri.fiware.org/ns/datamodels/full", + "fullAtEntrance": "https://uri.fiware.org/ns/datamodels/fullAtEntrance", + "spacesAvailable": "https://uri.fiware.org/ns/datamodels/spacesAvailable", + "almostFull": "https://uri.fiware.org/ns/datamodels/almostFull", + "optional": "https://uri.fiware.org/ns/datamodels/optional", + "mandatory": "https://uri.fiware.org/ns/datamodels/mandatory", + "notAvailable": "https://uri.fiware.org/ns/datamodels/notAvailable", + "partly": "https://uri.fiware.org/ns/datamodels/partly", + "code": "https://uri.fiware.org/ns/datamodels/code", + "page": "https://uri.fiware.org/ns/datamodels/page", + "zoneCode": "https://uri.fiware.org/ns/datamodels/zoneCode", + "wheelChairAccessible": "https://uri.fiware.org/ns/datamodels/wheelChairAccessible", + "hasParentStation": "https://uri.fiware.org/ns/datamodels/hasParentStation", + "0": "https://uri.fiware.org/ns/datamodels/0", + "1": "https://uri.fiware.org/ns/datamodels/1", + "2": "https://uri.fiware.org/ns/datamodels/2", + "shortName": "https://uri.fiware.org/ns/datamodels/shortName", + "headSign": "https://uri.fiware.org/ns/datamodels/headSign", + "hasRoute": "https://uri.fiware.org/ns/datamodels/hasRoute", + "hasShape": "https://uri.fiware.org/ns/datamodels/hasShape", + "block": "https://uri.fiware.org/ns/datamodels/block", + "hasService": "https://uri.fiware.org/ns/datamodels/hasService", + "direction": "https://uri.fiware.org/ns/datamodels/direction", + "GtfsTrip": "https://uri.fiware.org/ns/datamodels/GtfsTrip", + "distanceTravelled": "https://uri.fiware.org/ns/datamodels/distanceTravelled", + "location": "http://uri.etsi.org/ngsi-ld/location", + "GtfsShape": "https://uri.fiware.org/ns/datamodels/GtfsShape", + "source": "https://uri.fiware.org/ns/datamodels/source", + "phone": "https://uri.fiware.org/ns/datamodels/phone", + "timezone": "https://uri.fiware.org/ns/datamodels/timezone", + "language": "https://uri.fiware.org/ns/datamodels/language", + "address": "http://schema.org/address", + "GtfsAgency": "https://uri.fiware.org/ns/datamodels/GtfsAgency", + "hasOrigin": "https://uri.fiware.org/ns/datamodels/hasOrigin", + "hasDestination": "https://uri.fiware.org/ns/datamodels/hasDestination", + "transferType": 
"https://uri.fiware.org/ns/datamodels/transferType", + "minimumTransferTime": "https://uri.fiware.org/ns/datamodels/minimumTransferTime", + "GtfsTransferRule": "https://uri.fiware.org/ns/datamodels/GtfsTransferRule", + "3": "https://uri.fiware.org/ns/datamodels/3", + "hasStop": "https://uri.fiware.org/ns/datamodels/hasStop", + "hasTrip": "https://uri.fiware.org/ns/datamodels/hasTrip", + "arrivalTime": "https://uri.fiware.org/ns/datamodels/arrivalTime", + "departureTime": "https://uri.fiware.org/ns/datamodels/departureTime", + "stopSequence": "https://uri.fiware.org/ns/datamodels/stopSequence", + "pickupType": "https://uri.fiware.org/ns/datamodels/pickupType", + "stopHeadsign": "https://uri.fiware.org/ns/datamodels/stopHeadsign", + "dropOffType": "https://uri.fiware.org/ns/datamodels/dropOffType", + "timepoint": "https://uri.fiware.org/ns/datamodels/timepoint", + "GtfsStopTime": "https://uri.fiware.org/ns/datamodels/GtfsStopTime", + "appliesOn": "https://uri.fiware.org/ns/datamodels/appliesOn", + "exceptionType": "https://uri.fiware.org/ns/datamodels/exceptionType", + "GtfsCalendarDateRule": "https://uri.fiware.org/ns/datamodels/GtfsCalendarDateRule", + "hasAccessPoint": "https://uri.fiware.org/ns/datamodels/hasAccessPoint", + "GtfsStation": "https://uri.fiware.org/ns/datamodels/GtfsStation", + "routeColor": "https://uri.fiware.org/ns/datamodels/routeColor", + "routeTextColor": "https://uri.fiware.org/ns/datamodels/routeTextColor", + "routeType": "https://uri.fiware.org/ns/datamodels/routeType", + "routeSortOrder": "https://uri.fiware.org/ns/datamodels/routeSortOrder", + "operatedBy": "https://uri.fiware.org/ns/datamodels/operatedBy", + "GtfsRoute": "https://uri.fiware.org/ns/datamodels/GtfsRoute", + "4": "https://uri.fiware.org/ns/datamodels/4", + "5": "https://uri.fiware.org/ns/datamodels/5", + "6": "https://uri.fiware.org/ns/datamodels/6", + "7": "https://uri.fiware.org/ns/datamodels/7", + "monday": "https://uri.fiware.org/ns/datamodels/monday", + "tuesday": "https://uri.fiware.org/ns/datamodels/tuesday", + "wednesday": "https://uri.fiware.org/ns/datamodels/wednesday", + "thursday": "https://uri.fiware.org/ns/datamodels/thursday", + "friday": "https://uri.fiware.org/ns/datamodels/friday", + "saturday": "https://uri.fiware.org/ns/datamodels/saturday", + "sunday": "https://uri.fiware.org/ns/datamodels/sunday", + "GtfsCalendarRule": "https://uri.fiware.org/ns/datamodels/GtfsCalendarRule", + "startTime": "https://uri.fiware.org/ns/datamodels/startTime", + "endTime": "https://uri.fiware.org/ns/datamodels/endTime", + "headwaySeconds": "https://uri.fiware.org/ns/datamodels/headwaySeconds", + "exactTimes": "https://uri.fiware.org/ns/datamodels/exactTimes", + "GtfsFrequency": "https://uri.fiware.org/ns/datamodels/GtfsFrequency", + "GtfsAccessPoint": "https://uri.fiware.org/ns/datamodels/GtfsAccessPoint", + "GtfsStop": "https://uri.fiware.org/ns/datamodels/GtfsStop", + "GtfsService": "https://uri.fiware.org/ns/datamodels/GtfsService", + "refGtfsTransitFeedFile": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refGtfsTransitFeedFile" + }, + "routeId": "https://uri.fiware.org/ns/datamodels/routeId", + "stopId": "https://uri.fiware.org/ns/datamodels/stopId", + "lastupdatedAt": "https://uri.fiware.org/ns/datamodels/lastupdatedAt", + "arrivalEstimationUpdate": "https://uri.fiware.org/ns/datamodels/arrivalEstimationUpdate", + "ArrivalEstimation": "https://uri.fiware.org/ns/datamodels/ArrivalEstimation", + "remainingTime": "https://uri.fiware.org/ns/datamodels/remainingTime", + 
"remainingDistance": "https://uri.fiware.org/ns/datamodels/remainingDistance", + "headsign": "https://uri.fiware.org/ns/datamodels/headsign", + "refTargetDevice": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refTargetDevice" + }, + "dateEnergyMeteringStarted": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateEnergyMeteringStarted" + }, + "totalActiveEnergyImport": "https://uri.fiware.org/ns/datamodels/totalActiveEnergyImport", + "totalReactiveEnergyImport": "https://uri.fiware.org/ns/datamodels/totalReactiveEnergyImport", + "totalApparentEnergyImport": "https://uri.fiware.org/ns/datamodels/totalApparentEnergyImport", + "totalActiveEnergyExport": "https://uri.fiware.org/ns/datamodels/totalActiveEnergyExport", + "totalReactiveEnergyExport": "https://uri.fiware.org/ns/datamodels/totalReactiveEnergyExport", + "totalApparentEnergyExport": "https://uri.fiware.org/ns/datamodels/totalApparentEnergyExport", + "totalApparentPower": "https://uri.fiware.org/ns/datamodels/totalApparentPower", + "activeEnergyImport": "https://uri.fiware.org/ns/datamodels/activeEnergyImport", + "reactiveEnergyImport": "https://uri.fiware.org/ns/datamodels/reactiveEnergyImport", + "apparentEnergyImport": "https://uri.fiware.org/ns/datamodels/apparentEnergyImport", + "activeEnergyExport": "https://uri.fiware.org/ns/datamodels/activeEnergyExport", + "reactiveEnergyExport": "https://uri.fiware.org/ns/datamodels/reactiveEnergyExport", + "apparentEnergyExport": "https://uri.fiware.org/ns/datamodels/apparentEnergyExport", + "apparentPower": "https://uri.fiware.org/ns/datamodels/apparentPower", + "totalPowerFactor": "https://uri.fiware.org/ns/datamodels/totalPowerFactor", + "totalDisplacementPowerFactor": "https://uri.fiware.org/ns/datamodels/totalDisplacementPowerFactor", + "displacementPowerFactor": "https://uri.fiware.org/ns/datamodels/displacementPowerFactor", + "current": "https://uri.fiware.org/ns/datamodels/current", + "phaseVoltage": "https://uri.fiware.org/ns/datamodels/phaseVoltage", + "phaseToPhaseVoltage": "https://uri.fiware.org/ns/datamodels/phaseToPhaseVoltage", + "thdVoltage": "https://uri.fiware.org/ns/datamodels/thdVoltage", + "thdCurrent": "https://uri.fiware.org/ns/datamodels/thdCurrent", + "ThreePhaseAcMeasurement": "https://uri.fiware.org/ns/datamodels/ThreePhaseAcMeasurement", + "dateIssued": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateIssued" + }, + "validFrom": "https://uri.fiware.org/ns/datamodels/validFrom", + "validTo": "https://uri.fiware.org/ns/datamodels/validTo", + "severity": "https://uri.fiware.org/ns/datamodels/severity", + "alertSource": "https://uri.fiware.org/ns/datamodels/alertSource", + "data": "https://uri.fiware.org/ns/datamodels/data", + "Alert": "https://uri.fiware.org/ns/datamodels/Alert", + "informational": "https://uri.fiware.org/ns/datamodels/informational", + "critical": "https://uri.fiware.org/ns/datamodels/critical", + "traffic": "https://uri.fiware.org/ns/datamodels/traffic", + "naturalDisaster": "https://uri.fiware.org/ns/datamodels/naturalDisaster", + "weather": "https://uri.fiware.org/ns/datamodels/weather", + "environment": "https://uri.fiware.org/ns/datamodels/environment", + "health": "https://uri.fiware.org/ns/datamodels/health", + "agriculture": "https://uri.fiware.org/ns/datamodels/agriculture", + "subCategory": "https://uri.fiware.org/ns/datamodels/subCategory", + "trafficJam": "https://uri.fiware.org/ns/datamodels/trafficJam", + 
"carAccident": "https://uri.fiware.org/ns/datamodels/carAccident", + "carWrongDirection": "https://uri.fiware.org/ns/datamodels/carWrongDirection", + "carStopped": "https://uri.fiware.org/ns/datamodels/carStopped", + "pothole": "https://uri.fiware.org/ns/datamodels/pothole", + "roadClosed": "https://uri.fiware.org/ns/datamodels/roadClosed", + "roadWorks": "https://uri.fiware.org/ns/datamodels/roadWorks", + "hazardOnRoad": "https://uri.fiware.org/ns/datamodels/hazardOnRoad", + "injuredBiker": "https://uri.fiware.org/ns/datamodels/injuredBiker", + "flood": "https://uri.fiware.org/ns/datamodels/flood", + "tsunami": "https://uri.fiware.org/ns/datamodels/tsunami", + "coastalEvent": "https://uri.fiware.org/ns/datamodels/coastalEvent", + "earthquake": "https://uri.fiware.org/ns/datamodels/earthquake", + "rainfall": "https://uri.fiware.org/ns/datamodels/rainfall", + "highTemperature": "https://uri.fiware.org/ns/datamodels/highTemperature", + "lowTemperature": "https://uri.fiware.org/ns/datamodels/lowTemperature", + "heatWave": "https://uri.fiware.org/ns/datamodels/heatWave", + "coldWave": "https://uri.fiware.org/ns/datamodels/coldWave", + "ice": "https://uri.fiware.org/ns/datamodels/ice", + "snow": "https://uri.fiware.org/ns/datamodels/snow", + "wind": "https://uri.fiware.org/ns/datamodels/wind", + "fog": "https://uri.fiware.org/ns/datamodels/fog", + "tornado": "https://uri.fiware.org/ns/datamodels/tornado", + "tropicalCyclone": "https://uri.fiware.org/ns/datamodels/tropicalCyclone", + "hurricane": "https://uri.fiware.org/ns/datamodels/hurricane", + "snow/ice": "https://uri.fiware.org/ns/datamodels/snow/ice", + "thunderstorms": "https://uri.fiware.org/ns/datamodels/thunderstorms", + "fireRisk": "https://uri.fiware.org/ns/datamodels/fireRisk", + "avalancheRisk": "https://uri.fiware.org/ns/datamodels/avalancheRisk", + "floodRisk": "https://uri.fiware.org/ns/datamodels/floodRisk", + "airPollution": "https://uri.fiware.org/ns/datamodels/airPollution", + "waterPollution": "https://uri.fiware.org/ns/datamodels/waterPollution", + "pollenConcentration": "https://uri.fiware.org/ns/datamodels/pollenConcentration", + "asthmaAttack": "https://uri.fiware.org/ns/datamodels/asthmaAttack", + "bumpedPatient": "https://uri.fiware.org/ns/datamodels/bumpedPatient", + "fallenPatient": "https://uri.fiware.org/ns/datamodels/fallenPatient", + "heartAttack": "https://uri.fiware.org/ns/datamodels/heartAttack", + "suspiciousAction": "https://uri.fiware.org/ns/datamodels/suspiciousAction", + "robbery": "https://uri.fiware.org/ns/datamodels/robbery", + "assault": "https://uri.fiware.org/ns/datamodels/assault", + "civilDisorder": "https://uri.fiware.org/ns/datamodels/civilDisorder", + "buildingFire": "https://uri.fiware.org/ns/datamodels/buildingFire", + "forestFire": "https://uri.fiware.org/ns/datamodels/forestFire", + "noxiousWeed": "https://uri.fiware.org/ns/datamodels/noxiousWeed", + "snail": "https://uri.fiware.org/ns/datamodels/snail", + "insect": "https://uri.fiware.org/ns/datamodels/insect", + "rodent": "https://uri.fiware.org/ns/datamodels/rodent", + "bacteria": "https://uri.fiware.org/ns/datamodels/bacteria", + "microbe": "https://uri.fiware.org/ns/datamodels/microbe", + "fungus": "https://uri.fiware.org/ns/datamodels/fungus", + "mite": "https://uri.fiware.org/ns/datamodels/mite", + "virus": "https://uri.fiware.org/ns/datamodels/virus", + "nematodes": "https://uri.fiware.org/ns/datamodels/nematodes", + "irrigation": "https://uri.fiware.org/ns/datamodels/irrigation", + "fertilisation": 
"https://uri.fiware.org/ns/datamodels/fertilisation", + "refActivity": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refActivity" + }, + "refUserDevice": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refUserDevice" + }, + "refUser": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refUser" + }, + "UserContext": "https://uri.fiware.org/ns/datamodels/UserContext", + "dateActivityStarted": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateActivityStarted" + }, + "dateActivityEnded": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateActivityEnded" + }, + "refObject": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refObject" + }, + "activityType": "https://uri.fiware.org/ns/datamodels/activityType", + "refTarget": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refTarget" + }, + "refAgent": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refAgent" + }, + "UserActivity": "https://uri.fiware.org/ns/datamodels/UserActivity", + "agriParcel": "https://uri.fiware.org/ns/datamodels/agriParcel", + "soilTemperature": "https://uri.fiware.org/ns/datamodels/soilTemperature", + "soilMoistureVwc": "https://uri.fiware.org/ns/datamodels/soilMoistureVwc", + "soilMoistureEC": "https://uri.fiware.org/ns/datamodels/soilMoistureEC", + "solarRadiaton": "https://uri.fiware.org/ns/datamodels/solarRadiaton", + "relativeHumidity": "https://uri.fiware.org/ns/datamodels/relativeHumidity", + "atmosphericPressure": "https://uri.fiware.org/ns/datamodels/atmosphericPressure", + "devices": "https://uri.fiware.org/ns/datamodels/devices", + "observedAt": "https://uri.fiware.org/ns/datamodels/observedAt", + "AgriParcelRecord": "https://uri.fiware.org/ns/datamodels/AgriParcelRecord", + "agroVocConcept": "https://uri.fiware.org/ns/datamodels/agroVocConcept", + "agriProductParent": "https://uri.fiware.org/ns/datamodels/agriProductParent", + "agriProductChildren": "https://uri.fiware.org/ns/datamodels/agriProductChildren", + "root": "https://uri.fiware.org/ns/datamodels/root", + "AgriProductType": "https://uri.fiware.org/ns/datamodels/AgriProductType", + "plannedStartAt": "https://uri.fiware.org/ns/datamodels/plannedStartAt", + "plannedEndAt": "https://uri.fiware.org/ns/datamodels/plannedEndAt", + "operator": "https://uri.fiware.org/ns/datamodels/operator", + "startedAt": "https://uri.fiware.org/ns/datamodels/startedAt", + "endedAt": "https://uri.fiware.org/ns/datamodels/endedAt", + "reportedAt": "https://uri.fiware.org/ns/datamodels/reportedAt", + "agriProduct": "https://uri.fiware.org/ns/datamodels/agriProduct", + "quantity": "https://uri.fiware.org/ns/datamodels/quantity", + "waterSource": "https://uri.fiware.org/ns/datamodels/waterSource", + "workOrder": "https://uri.fiware.org/ns/datamodels/workOrder", + "workRecord": "https://uri.fiware.org/ns/datamodels/workRecord", + "irrigationRecord": "https://uri.fiware.org/ns/datamodels/irrigationRecord", + "AgriParcelOperation": "https://uri.fiware.org/ns/datamodels/AgriParcelOperation", + "fertiliser": "https://uri.fiware.org/ns/datamodels/fertiliser", + "inspection": "https://uri.fiware.org/ns/datamodels/inspection", + "pesticide": "https://uri.fiware.org/ns/datamodels/pesticide", + "water": "https://uri.fiware.org/ns/datamodels/water", + "failed": "https://uri.fiware.org/ns/datamodels/failed", + "borehole": "https://uri.fiware.org/ns/datamodels/borehole", + 
"river": "https://uri.fiware.org/ns/datamodels/river", + "rainwater capture": "https://uri.fiware.org/ns/datamodels/rainwater capture", + "water dam": "https://uri.fiware.org/ns/datamodels/water dam", + "commercial supply": "https://uri.fiware.org/ns/datamodels/commercial supply", + "species": "https://uri.fiware.org/ns/datamodels/species", + "legalId": "https://uri.fiware.org/ns/datamodels/legalId", + "birthdate": "https://uri.fiware.org/ns/datamodels/birthdate", + "sex": "https://uri.fiware.org/ns/datamodels/sex", + "breed": "https://uri.fiware.org/ns/datamodels/breed", + "calvedBy": "https://uri.fiware.org/ns/datamodels/calvedBy", + "siredBy": "https://uri.fiware.org/ns/datamodels/siredBy", + "weight": "https://uri.fiware.org/ns/datamodels/weight", + "ownedBy": "https://uri.fiware.org/ns/datamodels/ownedBy", + "locatedAt": "https://uri.fiware.org/ns/datamodels/locatedAt", + "phenologicalCondition": "https://uri.fiware.org/ns/datamodels/phenologicalCondition", + "reproductiveCondition": "https://uri.fiware.org/ns/datamodels/reproductiveCondition", + "healthCondition": "https://uri.fiware.org/ns/datamodels/healthCondition", + "fedWith": "https://uri.fiware.org/ns/datamodels/fedWith", + "welfareCondition": "https://uri.fiware.org/ns/datamodels/welfareCondition", + "Animal": "https://uri.fiware.org/ns/datamodels/Animal", + "cow": "https://uri.fiware.org/ns/datamodels/cow", + "goat": "https://uri.fiware.org/ns/datamodels/goat", + "horse": "https://uri.fiware.org/ns/datamodels/horse", + "pig": "https://uri.fiware.org/ns/datamodels/pig", + "sheep": "https://uri.fiware.org/ns/datamodels/sheep", + "male": "https://uri.fiware.org/ns/datamodels/male", + "female": "https://uri.fiware.org/ns/datamodels/female", + "lactatingBaby": "https://uri.fiware.org/ns/datamodels/lactatingBaby", + "grazingBaby": "https://uri.fiware.org/ns/datamodels/grazingBaby", + "maleAdult": "https://uri.fiware.org/ns/datamodels/maleAdult", + "femaleAdult": "https://uri.fiware.org/ns/datamodels/femaleAdult", + "maleYoung": "https://uri.fiware.org/ns/datamodels/maleYoung", + "femaleYoung": "https://uri.fiware.org/ns/datamodels/femaleYoung", + "noStatus": "https://uri.fiware.org/ns/datamodels/noStatus", + "inactive": "https://uri.fiware.org/ns/datamodels/inactive", + "inCalf": "https://uri.fiware.org/ns/datamodels/inCalf", + "inHeat": "https://uri.fiware.org/ns/datamodels/inHeat", + "active": "https://uri.fiware.org/ns/datamodels/active", + "healthy": "https://uri.fiware.org/ns/datamodels/healthy", + "sick": "https://uri.fiware.org/ns/datamodels/sick", + "inTreatment": "https://uri.fiware.org/ns/datamodels/inTreatment", + "issue": "https://uri.fiware.org/ns/datamodels/issue", + "adequate": "https://uri.fiware.org/ns/datamodels/adequate", + "landLocation": "https://uri.fiware.org/ns/datamodels/landLocation", + "hasBuilding": "https://uri.fiware.org/ns/datamodels/hasBuilding", + "hasAgriParcel": "https://uri.fiware.org/ns/datamodels/hasAgriParcel", + "AgriFarm": "https://uri.fiware.org/ns/datamodels/AgriFarm", + "belongsTo": "https://uri.fiware.org/ns/datamodels/belongsTo", + "agriParcelParent": "https://uri.fiware.org/ns/datamodels/agriParcelParent", + "agriParcelChildren": "https://uri.fiware.org/ns/datamodels/agriParcelChildren", + "weatherObserved": "https://uri.fiware.org/ns/datamodels/weatherObserved", + "waterQualityObserved": "https://uri.fiware.org/ns/datamodels/waterQualityObserved", + "leafTemperature": "https://uri.fiware.org/ns/datamodels/leafTemperature", + "co2": "https://uri.fiware.org/ns/datamodels/co2", + 
"dailyLight": "https://uri.fiware.org/ns/datamodels/dailyLight", + "drainFlow": "https://uri.fiware.org/ns/datamodels/drainFlow", + "AgriGreenhouse": "https://uri.fiware.org/ns/datamodels/AgriGreenhouse", + "agriSoil": "https://uri.fiware.org/ns/datamodels/agriSoil", + "agriFertiliser": "https://uri.fiware.org/ns/datamodels/agriFertiliser", + "agriPest": "https://uri.fiware.org/ns/datamodels/agriPest", + "harvestingInterval": "https://uri.fiware.org/ns/datamodels/harvestingInterval", + "plantingFrom": "https://uri.fiware.org/ns/datamodels/plantingFrom", + "wateringFrequency": "https://uri.fiware.org/ns/datamodels/wateringFrequency", + "AgriCrop": "https://uri.fiware.org/ns/datamodels/AgriCrop", + "daily": "https://uri.fiware.org/ns/datamodels/daily", + "weekly": "https://uri.fiware.org/ns/datamodels/weekly", + "biweekly": "https://uri.fiware.org/ns/datamodels/biweekly", + "monthly": "https://uri.fiware.org/ns/datamodels/monthly", + "onDemand": "https://uri.fiware.org/ns/datamodels/onDemand", + "area": "https://uri.fiware.org/ns/datamodels/area", + "agriCrop": "https://uri.fiware.org/ns/datamodels/agriCrop", + "cropStatus": "https://uri.fiware.org/ns/datamodels/cropStatus", + "lastPlantedAt": "https://uri.fiware.org/ns/datamodels/lastPlantedAt", + "AgriParcel": "https://uri.fiware.org/ns/datamodels/AgriParcel", + "seeded": "https://uri.fiware.org/ns/datamodels/seeded", + "justBorn": "https://uri.fiware.org/ns/datamodels/justBorn", + "growing": "https://uri.fiware.org/ns/datamodels/growing", + "maturing": "https://uri.fiware.org/ns/datamodels/maturing", + "readyForHarvesting": "https://uri.fiware.org/ns/datamodels/readyForHarvesting", + "version": "https://uri.fiware.org/ns/datamodels/version", + "endpoint": "https://uri.fiware.org/ns/datamodels/endpoint", + "AgriApp": "https://uri.fiware.org/ns/datamodels/AgriApp", + "TBD": "https://uri.fiware.org/ns/datamodels/TBD", + "relatedSource": "https://uri.fiware.org/ns/datamodels/relatedSource", + "AgriPest": "https://uri.fiware.org/ns/datamodels/AgriPest", + "totalSlotNumber": "https://uri.fiware.org/ns/datamodels/totalSlotNumber", + "freeSlotNumber": "https://uri.fiware.org/ns/datamodels/freeSlotNumber", + "availableBikeNumber": "https://uri.fiware.org/ns/datamodels/availableBikeNumber", + "outOfServiceSlotNumber": "https://uri.fiware.org/ns/datamodels/outOfServiceSlotNumber", + "BikeHireDockingStation": "https://uri.fiware.org/ns/datamodels/BikeHireDockingStation", + "working": "https://uri.fiware.org/ns/datamodels/working", + "outOfService": "https://uri.fiware.org/ns/datamodels/outOfService", + "withIncidence": "https://uri.fiware.org/ns/datamodels/withIncidence", + "empty": "https://uri.fiware.org/ns/datamodels/empty", + "almostEmpty": "https://uri.fiware.org/ns/datamodels/almostEmpty", + "capacity": "https://uri.fiware.org/ns/datamodels/capacity", + "socketNumber": "https://uri.fiware.org/ns/datamodels/socketNumber", + "availableCapacity": "https://uri.fiware.org/ns/datamodels/availableCapacity", + "socketType": "https://uri.fiware.org/ns/datamodels/socketType", + "network": "https://uri.fiware.org/ns/datamodels/network", + "amperage": "https://uri.fiware.org/ns/datamodels/amperage", + "EVChargingStation": "https://uri.fiware.org/ns/datamodels/EVChargingStation", + "truck": "https://uri.fiware.org/ns/datamodels/truck", + "Type2": "https://uri.fiware.org/ns/datamodels/Type2", + "CHAdeMO": "https://uri.fiware.org/ns/datamodels/CHAdeMO", + "CCS/SAE": "https://uri.fiware.org/ns/datamodels/CCS/SAE", + "Type3": 
"https://uri.fiware.org/ns/datamodels/Type3", + "Tesla": "https://uri.fiware.org/ns/datamodels/Tesla", + "J-1772": "https://uri.fiware.org/ns/datamodels/J-1772", + "Wall_Euro": "https://uri.fiware.org/ns/datamodels/Wall_Euro", + "Caravan_Mains_Socket": "https://uri.fiware.org/ns/datamodels/Caravan_Mains_Socket", + "Dual_J-1772": "https://uri.fiware.org/ns/datamodels/Dual_J-1772", + "Dual_CHAdeMO": "https://uri.fiware.org/ns/datamodels/Dual_CHAdeMO", + "Mennekes": "https://uri.fiware.org/ns/datamodels/Mennekes", + "Dual_Mennekes": "https://uri.fiware.org/ns/datamodels/Dual_Mennekes", + "Other": "https://uri.fiware.org/ns/datamodels/Other", + "refRoadSegment": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refRoadSegment" + }, + "dateObserved": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateObserved" + }, + "dateObservedFrom": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateObservedFrom" + }, + "dateObservedTo": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateObservedTo" + }, + "peopleCount": "https://uri.fiware.org/ns/datamodels/peopleCount", + "occupancy": "https://uri.fiware.org/ns/datamodels/occupancy", + "averageCrowdSpeed": "https://uri.fiware.org/ns/datamodels/averageCrowdSpeed", + "congested": "https://uri.fiware.org/ns/datamodels/congested", + "averageHeadwayTime": "https://uri.fiware.org/ns/datamodels/averageHeadwayTime", + "CrowdFlowObserved": "https://uri.fiware.org/ns/datamodels/CrowdFlowObserved", + "inbound": "https://uri.fiware.org/ns/datamodels/inbound", + "outbound": "https://uri.fiware.org/ns/datamodels/outbound", + "roadClass": "https://uri.fiware.org/ns/datamodels/roadClass", + "Road": "https://uri.fiware.org/ns/datamodels/Road", + "motorway": "https://uri.fiware.org/ns/datamodels/motorway", + "trunk": "https://uri.fiware.org/ns/datamodels/trunk", + "primary": "https://uri.fiware.org/ns/datamodels/primary", + "secondary": "https://uri.fiware.org/ns/datamodels/secondary", + "tertiary": "https://uri.fiware.org/ns/datamodels/tertiary", + "unclassified": "https://uri.fiware.org/ns/datamodels/unclassified", + "vehicleType": "https://uri.fiware.org/ns/datamodels/vehicleType", + "vehicleModelDate": "https://uri.fiware.org/ns/datamodels/vehicleModelDate", + "cargoVolume": "https://uri.fiware.org/ns/datamodels/cargoVolume", + "fuelType": "https://uri.fiware.org/ns/datamodels/fuelType", + "fuelConsumption": "https://uri.fiware.org/ns/datamodels/fuelConsumption", + "height": "https://uri.fiware.org/ns/datamodels/height", + "depth": "https://uri.fiware.org/ns/datamodels/depth", + "vehicleEngine": "https://uri.fiware.org/ns/datamodels/vehicleEngine", + "url": "https://uri.fiware.org/ns/datamodels/url", + "VehicleModel": "https://uri.fiware.org/ns/datamodels/VehicleModel", + "minibus": "https://uri.fiware.org/ns/datamodels/minibus", + "tram": "https://uri.fiware.org/ns/datamodels/tram", + "trolley": "https://uri.fiware.org/ns/datamodels/trolley", + "binTrolley": "https://uri.fiware.org/ns/datamodels/binTrolley", + "sweepingMachine": "https://uri.fiware.org/ns/datamodels/sweepingMachine", + "cleaningTrolley": "https://uri.fiware.org/ns/datamodels/cleaningTrolley", + "gasoline": "https://uri.fiware.org/ns/datamodels/gasoline", + "petrol(unleaded)": "https://uri.fiware.org/ns/datamodels/petrol(unleaded)", + "petrol(leaded)": "https://uri.fiware.org/ns/datamodels/petrol(leaded)", + "petrol": 
"https://uri.fiware.org/ns/datamodels/petrol", + "diesel": "https://uri.fiware.org/ns/datamodels/diesel", + "electric": "https://uri.fiware.org/ns/datamodels/electric", + "hydrogen": "https://uri.fiware.org/ns/datamodels/hydrogen", + "lpg": "https://uri.fiware.org/ns/datamodels/lpg", + "autogas": "https://uri.fiware.org/ns/datamodels/autogas", + "cng": "https://uri.fiware.org/ns/datamodels/cng", + "biodiesel ethanol": "https://uri.fiware.org/ns/datamodels/biodiesel ethanol", + "hybrid electric/petrol": "https://uri.fiware.org/ns/datamodels/hybrid electric/petrol", + "hybrid electric/diesel": "https://uri.fiware.org/ns/datamodels/hybrid electric/diesel", + "previousLocation": "https://uri.fiware.org/ns/datamodels/previousLocation", + "speed": "https://uri.fiware.org/ns/datamodels/speed", + "heading": "https://uri.fiware.org/ns/datamodels/heading", + "cargoWeight": "https://uri.fiware.org/ns/datamodels/cargoWeight", + "vehicleIdentificationNumber": "https://uri.fiware.org/ns/datamodels/vehicleIdentificationNumber", + "vehiclePlateIdentifier": "https://uri.fiware.org/ns/datamodels/vehiclePlateIdentifier", + "fleetVehicleId": "https://uri.fiware.org/ns/datamodels/fleetVehicleId", + "dateVehicleFirstRegistered": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateVehicleFirstRegistered" + }, + "dateFirstUsed": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateFirstUsed" + }, + "purchaseDate": "https://uri.fiware.org/ns/datamodels/purchaseDate", + "mileageFromOdometer": "https://uri.fiware.org/ns/datamodels/mileageFromOdometer", + "vehicleConfiguration": "https://uri.fiware.org/ns/datamodels/vehicleConfiguration", + "color": "https://uri.fiware.org/ns/datamodels/color", + "feature": "https://uri.fiware.org/ns/datamodels/feature", + "serviceProvided": "https://uri.fiware.org/ns/datamodels/serviceProvided", + "vehicleSpecialUsage": "https://uri.fiware.org/ns/datamodels/vehicleSpecialUsage", + "refVehicleModel": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refVehicleModel" + }, + "serviceStatus": "https://uri.fiware.org/ns/datamodels/serviceStatus", + "Vehicle": "https://uri.fiware.org/ns/datamodels/Vehicle", + "municipalServices": "https://uri.fiware.org/ns/datamodels/municipalServices", + "specialUsage": "https://uri.fiware.org/ns/datamodels/specialUsage", + "tracked": "https://uri.fiware.org/ns/datamodels/tracked", + "nonTracked": "https://uri.fiware.org/ns/datamodels/nonTracked", + "gps": "https://uri.fiware.org/ns/datamodels/gps", + "airbag": "https://uri.fiware.org/ns/datamodels/airbag", + "overspeed": "https://uri.fiware.org/ns/datamodels/overspeed", + "abs": "https://uri.fiware.org/ns/datamodels/abs", + "wifi": "https://uri.fiware.org/ns/datamodels/wifi", + "backCamera": "https://uri.fiware.org/ns/datamodels/backCamera", + "proximitySensor": "https://uri.fiware.org/ns/datamodels/proximitySensor", + "disabledRamp": "https://uri.fiware.org/ns/datamodels/disabledRamp", + "alarm": "https://uri.fiware.org/ns/datamodels/alarm", + "internetConnection": "https://uri.fiware.org/ns/datamodels/internetConnection", + "garbageCollection": "https://uri.fiware.org/ns/datamodels/garbageCollection", + "parksAndGardens": "https://uri.fiware.org/ns/datamodels/parksAndGardens", + "streetLighting": "https://uri.fiware.org/ns/datamodels/streetLighting", + "roadSignalling": "https://uri.fiware.org/ns/datamodels/roadSignalling", + "cargoTransport": 
"https://uri.fiware.org/ns/datamodels/cargoTransport", + "urbanTransit": "https://uri.fiware.org/ns/datamodels/urbanTransit", + "maintenance": "https://uri.fiware.org/ns/datamodels/maintenance", + "streetCleaning": "https://uri.fiware.org/ns/datamodels/streetCleaning", + "wasteContainerCleaning": "https://uri.fiware.org/ns/datamodels/wasteContainerCleaning", + "auxiliaryServices": "https://uri.fiware.org/ns/datamodels/auxiliaryServices", + "goodsSelling": "https://uri.fiware.org/ns/datamodels/goodsSelling", + "fairground": "https://uri.fiware.org/ns/datamodels/fairground", + "specialTransport": "https://uri.fiware.org/ns/datamodels/specialTransport", + "taxi": "https://uri.fiware.org/ns/datamodels/taxi", + "ambulance": "https://uri.fiware.org/ns/datamodels/ambulance", + "police": "https://uri.fiware.org/ns/datamodels/police", + "fireBrigade": "https://uri.fiware.org/ns/datamodels/fireBrigade", + "schoolTransportation": "https://uri.fiware.org/ns/datamodels/schoolTransportation", + "parked": "https://uri.fiware.org/ns/datamodels/parked", + "onRoute": "https://uri.fiware.org/ns/datamodels/onRoute", + "broken": "https://uri.fiware.org/ns/datamodels/broken", + "laneId": "https://uri.fiware.org/ns/datamodels/laneId", + "averageVehicleSpeed": "https://uri.fiware.org/ns/datamodels/averageVehicleSpeed", + "averageVehicleLength": "https://uri.fiware.org/ns/datamodels/averageVehicleLength", + "averageGapDistance": "https://uri.fiware.org/ns/datamodels/averageGapDistance", + "laneDirection": "https://uri.fiware.org/ns/datamodels/laneDirection", + "reversedLane": "https://uri.fiware.org/ns/datamodels/reversedLane", + "vehicleSubType": "https://uri.fiware.org/ns/datamodels/vehicleSubType", + "TrafficFlowObserved": "https://uri.fiware.org/ns/datamodels/TrafficFlowObserved", + "forward": "https://uri.fiware.org/ns/datamodels/forward", + "backward": "https://uri.fiware.org/ns/datamodels/backward", + "refRoad": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refRoad" + }, + "startPoint": "https://uri.fiware.org/ns/datamodels/startPoint", + "endPoint": "https://uri.fiware.org/ns/datamodels/endPoint", + "startKilometer": "https://uri.fiware.org/ns/datamodels/startKilometer", + "endKilometer": "https://uri.fiware.org/ns/datamodels/endKilometer", + "totalLaneNumber": "https://uri.fiware.org/ns/datamodels/totalLaneNumber", + "maximumAllowedSpeed": "https://uri.fiware.org/ns/datamodels/maximumAllowedSpeed", + "minimumAllowedSpeed": "https://uri.fiware.org/ns/datamodels/minimumAllowedSpeed", + "maximumAllowedWeight": "https://uri.fiware.org/ns/datamodels/maximumAllowedWeight", + "laneUsage": "https://uri.fiware.org/ns/datamodels/laneUsage", + "RoadSegment": "https://uri.fiware.org/ns/datamodels/RoadSegment", + "oneway": "https://uri.fiware.org/ns/datamodels/oneway", + "toll": "https://uri.fiware.org/ns/datamodels/toll", + "link": "https://uri.fiware.org/ns/datamodels/link", + "precipitation": "https://uri.fiware.org/ns/datamodels/precipitation", + "solarRadiation": "https://uri.fiware.org/ns/datamodels/solarRadiation", + "illuminance": "https://uri.fiware.org/ns/datamodels/illuminance", + "pressureTendency": "https://uri.fiware.org/ns/datamodels/pressureTendency", + "dewPoint": "https://uri.fiware.org/ns/datamodels/dewPoint", + "streamGauge": "https://uri.fiware.org/ns/datamodels/streamGauge", + "snowHeight": "https://uri.fiware.org/ns/datamodels/snowHeight", + "WeatherObserved": "https://uri.fiware.org/ns/datamodels/WeatherObserved", + "raising": "https://uri.fiware.org/ns/datamodels/raising", 
+ "falling": "https://uri.fiware.org/ns/datamodels/falling", + "steady": "https://uri.fiware.org/ns/datamodels/steady", + "dateRetrieved": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateRetrieved" + }, + "validity": "https://uri.fiware.org/ns/datamodels/validity", + "dayMaximum": "https://uri.fiware.org/ns/datamodels/dayMaximum", + "dayMinimum": "https://uri.fiware.org/ns/datamodels/dayMinimum", + "uVIndexMax": "https://uri.fiware.org/ns/datamodels/uVIndexMax", + "WeatherForecast": "https://uri.fiware.org/ns/datamodels/WeatherForecast", + "temperature": "https://uri.fiware.org/ns/datamodels/temperature", + "feelLikesTemperature": "https://uri.fiware.org/ns/datamodels/feelLikesTemperature", + "organization": "https://uri.fiware.org/ns/datamodels/organization", + "process": "https://uri.fiware.org/ns/datamodels/process", + "product": "https://uri.fiware.org/ns/datamodels/product", + "businessTarget": "https://uri.fiware.org/ns/datamodels/businessTarget", + "calculationFrequency": "https://uri.fiware.org/ns/datamodels/calculationFrequency", + "calculatedBy": "https://uri.fiware.org/ns/datamodels/calculatedBy", + "calculationMethod": "https://uri.fiware.org/ns/datamodels/calculationMethod", + "calculationFormula": "https://uri.fiware.org/ns/datamodels/calculationFormula", + "aggregatedData": "https://uri.fiware.org/ns/datamodels/aggregatedData", + "calculationPeriod": "https://uri.fiware.org/ns/datamodels/calculationPeriod", + "currentStanding": "https://uri.fiware.org/ns/datamodels/currentStanding", + "kpiValue": "https://uri.fiware.org/ns/datamodels/kpiValue", + "effectiveSince": "https://uri.fiware.org/ns/datamodels/effectiveSince", + "dateNextCalculation": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateNextCalculation" + }, + "dateExpires": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateExpires" + }, + "updatedAt": "https://uri.fiware.org/ns/datamodels/updatedAt", + "KeyPerformanceIndicator": "https://uri.fiware.org/ns/datamodels/KeyPerformanceIndicator", + "quantitative": "https://uri.fiware.org/ns/datamodels/quantitative", + "qualitative": "https://uri.fiware.org/ns/datamodels/qualitative", + "leading": "https://uri.fiware.org/ns/datamodels/leading", + "lagging": "https://uri.fiware.org/ns/datamodels/lagging", + "input": "https://uri.fiware.org/ns/datamodels/input", + "output": "https://uri.fiware.org/ns/datamodels/output", + "practical": "https://uri.fiware.org/ns/datamodels/practical", + "directional": "https://uri.fiware.org/ns/datamodels/directional", + "actionable": "https://uri.fiware.org/ns/datamodels/actionable", + "financial": "https://uri.fiware.org/ns/datamodels/financial", + "hourly": "https://uri.fiware.org/ns/datamodels/hourly", + "yearly": "https://uri.fiware.org/ns/datamodels/yearly", + "quarterly": "https://uri.fiware.org/ns/datamodels/quarterly", + "bimonthly": "https://uri.fiware.org/ns/datamodels/bimonthly", + "veryGood": "https://uri.fiware.org/ns/datamodels/veryGood", + "good": "https://uri.fiware.org/ns/datamodels/good", + "fair": "https://uri.fiware.org/ns/datamodels/fair", + "bad": "https://uri.fiware.org/ns/datamodels/bad", + "veryBad": "https://uri.fiware.org/ns/datamodels/veryBad", + "controlledAsset": "https://uri.fiware.org/ns/datamodels/controlledAsset", + "mnc": "https://uri.fiware.org/ns/datamodels/mnc", + "macAddress": "https://uri.fiware.org/ns/datamodels/macAddress", + "ipAddress": 
"https://uri.fiware.org/ns/datamodels/ipAddress", + "configuration": "https://uri.fiware.org/ns/datamodels/configuration", + "dateInstalled": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateInstalled" + }, + "dateManufactured": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateManufactured" + }, + "hardwareVersion": "https://uri.fiware.org/ns/datamodels/hardwareVersion", + "softwareVersion": "https://uri.fiware.org/ns/datamodels/softwareVersion", + "firmwareVersion": "https://uri.fiware.org/ns/datamodels/firmwareVersion", + "osVersion": "https://uri.fiware.org/ns/datamodels/osVersion", + "dateLastCalibration": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastCalibration" + }, + "refDeviceModel": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refDeviceModel" + }, + "batteryLevel": "https://uri.fiware.org/ns/datamodels/batteryLevel", + "rssi": "https://uri.fiware.org/ns/datamodels/rssi", + "deviceState": "https://uri.fiware.org/ns/datamodels/deviceState", + "dateLastValueReported": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastValueReported" + }, + "value": "https://uri.fiware.org/ns/datamodels/value", + "Device": "https://uri.fiware.org/ns/datamodels/Device", + "deviceClass": "https://uri.fiware.org/ns/datamodels/deviceClass", + "controlledProperty": "https://uri.fiware.org/ns/datamodels/controlledProperty", + "function": "https://uri.fiware.org/ns/datamodels/function", + "supportedUnits": "https://uri.fiware.org/ns/datamodels/supportedUnits", + "energyLimitationClass": "https://uri.fiware.org/ns/datamodels/energyLimitationClass", + "documentation": "https://uri.fiware.org/ns/datamodels/documentation", + "DeviceModel": "https://uri.fiware.org/ns/datamodels/DeviceModel", + "C0": "https://uri.fiware.org/ns/datamodels/C0", + "C1": "https://uri.fiware.org/ns/datamodels/C1", + "C2": "https://uri.fiware.org/ns/datamodels/C2", + "humidity": "https://uri.fiware.org/ns/datamodels/humidity", + "light": "https://uri.fiware.org/ns/datamodels/light", + "motion": "https://uri.fiware.org/ns/datamodels/motion", + "fillingLevel": "https://uri.fiware.org/ns/datamodels/fillingLevel", + "power": "https://uri.fiware.org/ns/datamodels/power", + "pressure": "https://uri.fiware.org/ns/datamodels/pressure", + "smoke": "https://uri.fiware.org/ns/datamodels/smoke", + "energy": "https://uri.fiware.org/ns/datamodels/energy", + "noiseLevel": "https://uri.fiware.org/ns/datamodels/noiseLevel", + "weatherConditions": "https://uri.fiware.org/ns/datamodels/weatherConditions", + "windSpeed": "https://uri.fiware.org/ns/datamodels/windSpeed", + "windDirection": "https://uri.fiware.org/ns/datamodels/windDirection", + "pH": "https://uri.fiware.org/ns/datamodels/pH", + "conductivity": "https://uri.fiware.org/ns/datamodels/conductivity", + "conductance": "https://uri.fiware.org/ns/datamodels/conductance", + "tss": "https://uri.fiware.org/ns/datamodels/tss", + "tds": "https://uri.fiware.org/ns/datamodels/tds", + "turbidity": "https://uri.fiware.org/ns/datamodels/turbidity", + "salinity": "https://uri.fiware.org/ns/datamodels/salinity", + "orp": "https://uri.fiware.org/ns/datamodels/orp", + "cdom": "https://uri.fiware.org/ns/datamodels/cdom", + "waterConsumption": "https://uri.fiware.org/ns/datamodels/waterConsumption", + "gasComsumption": 
"https://uri.fiware.org/ns/datamodels/gasComsumption", + "electricityConsumption": "https://uri.fiware.org/ns/datamodels/electricityConsumption", + "soilMoisture": "https://uri.fiware.org/ns/datamodels/soilMoisture", + "trafficFlow": "https://uri.fiware.org/ns/datamodels/trafficFlow", + "levelControl": "https://uri.fiware.org/ns/datamodels/levelControl", + "sensing": "https://uri.fiware.org/ns/datamodels/sensing", + "onOff": "https://uri.fiware.org/ns/datamodels/onOff", + "openClose": "https://uri.fiware.org/ns/datamodels/openClose", + "metering": "https://uri.fiware.org/ns/datamodels/metering", + "eventNotification": "https://uri.fiware.org/ns/datamodels/eventNotification", + "E0": "https://uri.fiware.org/ns/datamodels/E0", + "E1": "https://uri.fiware.org/ns/datamodels/E1", + "E2": "https://uri.fiware.org/ns/datamodels/E2", + "E9": "https://uri.fiware.org/ns/datamodels/E9", + "areaCovered": "https://uri.fiware.org/ns/datamodels/areaCovered", + "applicationUrl": "https://uri.fiware.org/ns/datamodels/applicationUrl", + "availability": "https://uri.fiware.org/ns/datamodels/availability", + "refRelatedEntity": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refRelatedEntity" + }, + "refSmartSpot": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refSmartSpot" + }, + "SmartPointOfInteraction": "https://uri.fiware.org/ns/datamodels/SmartPointOfInteraction", + "entertainment": "https://uri.fiware.org/ns/datamodels/entertainment", + "infotainment": "https://uri.fiware.org/ns/datamodels/infotainment", + "co-creation": "https://uri.fiware.org/ns/datamodels/co-creation", + "announcedUrl": "https://uri.fiware.org/ns/datamodels/announcedUrl", + "signalStrength": "https://uri.fiware.org/ns/datamodels/signalStrength", + "bluetoothChannel": "https://uri.fiware.org/ns/datamodels/bluetoothChannel", + "coverageRadius": "https://uri.fiware.org/ns/datamodels/coverageRadius", + "announcementPeriod": "https://uri.fiware.org/ns/datamodels/announcementPeriod", + "refSmartPointOfInteraction": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refSmartPointOfInteraction" + }, + "SmartSpot": "https://uri.fiware.org/ns/datamodels/SmartSpot", + "lowest": "https://uri.fiware.org/ns/datamodels/lowest", + "highest": "https://uri.fiware.org/ns/datamodels/highest", + "37": "https://uri.fiware.org/ns/datamodels/37", + "38": "https://uri.fiware.org/ns/datamodels/38", + "39": "https://uri.fiware.org/ns/datamodels/39", + "37,38": "https://uri.fiware.org/ns/datamodels/37,38", + "38,39": "https://uri.fiware.org/ns/datamodels/38,39", + "37,39": "https://uri.fiware.org/ns/datamodels/37,39", + "37,38,39": "https://uri.fiware.org/ns/datamodels/37,38,39", + "airQualityIndex": "https://uri.fiware.org/ns/datamodels/airQualityIndex", + "airQualityLevel": "https://uri.fiware.org/ns/datamodels/airQualityLevel", + "reliability": "https://uri.fiware.org/ns/datamodels/reliability", + "refPointOfInterest": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refPointOfInterest" + }, + "refWeatherObserved": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refWeatherObserved" + }, + "AirQualityObserved": "https://uri.fiware.org/ns/datamodels/AirQualityObserved", + "allergenRisk": "https://uri.fiware.org/ns/datamodels/allergenRisk", + "AeroAllergenObserved": "https://uri.fiware.org/ns/datamodels/AeroAllergenObserved", + "moderate": "https://uri.fiware.org/ns/datamodels/moderate", + "veryHigh": "https://uri.fiware.org/ns/datamodels/veryHigh", + "measurand": 
"https://uri.fiware.org/ns/datamodels/measurand", + "O2": "https://uri.fiware.org/ns/datamodels/O2", + "Chla": "https://uri.fiware.org/ns/datamodels/Chla", + "PE": "https://uri.fiware.org/ns/datamodels/PE", + "PC": "https://uri.fiware.org/ns/datamodels/PC", + "NH4": "https://uri.fiware.org/ns/datamodels/NH4", + "NH3": "https://uri.fiware.org/ns/datamodels/NH3", + "Cl-": "https://uri.fiware.org/ns/datamodels/Cl-", + "NO3": "https://uri.fiware.org/ns/datamodels/NO3", + "WaterQualityObserved": "https://uri.fiware.org/ns/datamodels/WaterQualityObserved", + "sonometerClass": "https://uri.fiware.org/ns/datamodels/sonometerClass", + "NoiseLevelObserved": "https://uri.fiware.org/ns/datamodels/NoiseLevelObserved", + "taxon": "https://uri.fiware.org/ns/datamodels/taxon", + "shape": "https://uri.fiware.org/ns/datamodels/shape", + "dateLastWatering": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastWatering" + }, + "nextWateringDeadline": "https://uri.fiware.org/ns/datamodels/nextWateringDeadline", + "refGarden": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refGarden" + }, + "FlowerBed": "https://uri.fiware.org/ns/datamodels/FlowerBed", + "hedge": "https://uri.fiware.org/ns/datamodels/hedge", + "lawnArea": "https://uri.fiware.org/ns/datamodels/lawnArea", + "portable": "https://uri.fiware.org/ns/datamodels/portable", + "urbanTreeSpot": "https://uri.fiware.org/ns/datamodels/urbanTreeSpot", + "rectangular": "https://uri.fiware.org/ns/datamodels/rectangular", + "elliptic": "https://uri.fiware.org/ns/datamodels/elliptic", + "polygon": "https://uri.fiware.org/ns/datamodels/polygon", + "circular": "https://uri.fiware.org/ns/datamodels/circular", + "style": "https://uri.fiware.org/ns/datamodels/style", + "refRecord": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refRecord" + }, + "Garden": "https://uri.fiware.org/ns/datamodels/Garden", + "botanical": "https://uri.fiware.org/ns/datamodels/botanical", + "community": "https://uri.fiware.org/ns/datamodels/community", + "monastary": "https://uri.fiware.org/ns/datamodels/monastary", + "fencedOff": "https://uri.fiware.org/ns/datamodels/fencedOff", + "english": "https://uri.fiware.org/ns/datamodels/english", + "french": "https://uri.fiware.org/ns/datamodels/french", + "chinese": "https://uri.fiware.org/ns/datamodels/chinese", + "japanese": "https://uri.fiware.org/ns/datamodels/japanese", + "zen": "https://uri.fiware.org/ns/datamodels/zen", + "rosarium": "https://uri.fiware.org/ns/datamodels/rosarium", + "herb_garden": "https://uri.fiware.org/ns/datamodels/herb_garden", + "kitchen": "https://uri.fiware.org/ns/datamodels/kitchen", + "soilMoistureEc": "https://uri.fiware.org/ns/datamodels/soilMoistureEc", + "refGreenspace": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refGreenspace" + }, + "GreenspaceRecord": "https://uri.fiware.org/ns/datamodels/GreenspaceRecord", + "insertHolesNumber": "https://uri.fiware.org/ns/datamodels/insertHolesNumber", + "refWasteContainer": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refWasteContainer" + }, + "WasteContainerIsle": "https://uri.fiware.org/ns/datamodels/WasteContainerIsle", + "containerFix": "https://uri.fiware.org/ns/datamodels/containerFix", + "fenced": "https://uri.fiware.org/ns/datamodels/fenced", + "methaneConcentration": "https://uri.fiware.org/ns/datamodels/methaneConcentration", + "storedWasteKind": "https://uri.fiware.org/ns/datamodels/storedWasteKind", + "storedWasteOrigin": 
"https://uri.fiware.org/ns/datamodels/storedWasteOrigin", + "storedWasteCode": "https://uri.fiware.org/ns/datamodels/storedWasteCode", + "regulation": "https://uri.fiware.org/ns/datamodels/regulation", + "dateLastEmptying": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastEmptying" + }, + "actuationHours": "https://uri.fiware.org/ns/datamodels/actuationHours", + "dateLastCleaning": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateLastCleaning" + }, + "nextCleaningDeadline": "https://uri.fiware.org/ns/datamodels/nextCleaningDeadline", + "isleId": "https://uri.fiware.org/ns/datamodels/isleId", + "refWasteContainerModel": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refWasteContainerModel" + }, + "refWasteContainerIsle": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refWasteContainerIsle" + }, + "WasteContainer": "https://uri.fiware.org/ns/datamodels/WasteContainer", + "fixed": "https://uri.fiware.org/ns/datamodels/fixed", + "organic": "https://uri.fiware.org/ns/datamodels/organic", + "inorganic": "https://uri.fiware.org/ns/datamodels/inorganic", + "glass": "https://uri.fiware.org/ns/datamodels/glass", + "oil": "https://uri.fiware.org/ns/datamodels/oil", + "paper": "https://uri.fiware.org/ns/datamodels/paper", + "batteries": "https://uri.fiware.org/ns/datamodels/batteries", + "electronics": "https://uri.fiware.org/ns/datamodels/electronics", + "hazardous": "https://uri.fiware.org/ns/datamodels/hazardous", + "household": "https://uri.fiware.org/ns/datamodels/household", + "municipal": "https://uri.fiware.org/ns/datamodels/municipal", + "hostelry": "https://uri.fiware.org/ns/datamodels/hostelry", + "lidOpen": "https://uri.fiware.org/ns/datamodels/lidOpen", + "dropped": "https://uri.fiware.org/ns/datamodels/dropped", + "moved": "https://uri.fiware.org/ns/datamodels/moved", + "maximumLoad": "https://uri.fiware.org/ns/datamodels/maximumLoad", + "recommendedLoad": "https://uri.fiware.org/ns/datamodels/recommendedLoad", + "madeOf": "https://uri.fiware.org/ns/datamodels/madeOf", + "madeOfCode": "https://uri.fiware.org/ns/datamodels/madeOfCode", + "WasteContainerModel": "https://uri.fiware.org/ns/datamodels/WasteContainerModel", + "dumpster": "https://uri.fiware.org/ns/datamodels/dumpster", + "trashCan": "https://uri.fiware.org/ns/datamodels/trashCan", + "wheelieBin": "https://uri.fiware.org/ns/datamodels/wheelieBin", + "wheels": "https://uri.fiware.org/ns/datamodels/wheels", + "lid": "https://uri.fiware.org/ns/datamodels/lid", + "roundedLid": "https://uri.fiware.org/ns/datamodels/roundedLid", + "insertHoles": "https://uri.fiware.org/ns/datamodels/insertHoles", + "lockable": "https://uri.fiware.org/ns/datamodels/lockable", + "volume": "https://uri.fiware.org/ns/datamodels/volume", + "fuel": "https://uri.fiware.org/ns/datamodels/fuel", + "refPark": { + "@type": "@id", + "@id": "https://uri.fiware.org/ns/datamodels/refPark" + }, + "$id": "https://uri.fiware.org/ns/datamodels/$id", + "dateCreated": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateCreated" + }, + "dateModified": { + "@type": "http://uri.etsi.org/ngsi-ld/DateTime", + "@id": "https://uri.fiware.org/ns/datamodels/dateModified" + }, + "dataProvider": "https://uri.fiware.org/ns/datamodels/dataProvider", + "seeAlso": "https://uri.fiware.org/ns/datamodels/seeAlso" + } +} diff --git a/scorpio-broker/Examples/fixedfiwaredatamodel.jsonld 
b/scorpio-broker/Examples/fixedfiwaredatamodel.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..1decdb447eded4d3ad73b09213672a50e9df4a70 --- /dev/null +++ b/scorpio-broker/Examples/fixedfiwaredatamodel.jsonld @@ -0,0 +1,62 @@ +{ + "@context": { + "address": "https://schema.org/address", + "image": "https://schema.org/image", + "fullnessThreshold": "https://uri.fiware.org/ns/data-models#fullnessThreshold", + "status": { + "@id": "https://uri.fiware.org/ns/data-models#status" + }, + "actuationHours": "https://uri.fiware.org/ns/data-models#actuationHours", + "cargoWeight": "https://uri.fiware.org/ns/data-models#cargoWeight", + "category": { + "@id": "https://uri.fiware.org/ns/data-models#category", + "@type": "@id", + "@container": "@list" + }, + "color": "https://uri.fiware.org/ns/data-models#color", + "dataProvider": "https://uri.fiware.org/ns/data-models#dataProvider", + "dateLastCleaning": "https://uri.fiware.org/ns/data-models#dateLastCleaning", + "dateLastEmptying": "https://uri.fiware.org/ns/data-models#dateLastEmptying", + "dateServiceStarted": "https://uri.fiware.org/ns/data-models#dateServiceStarted", + "fillingLevel": "https://uri.fiware.org/ns/data-models#fillingLevel", + "methaneConcentration": "https://uri.fiware.org/ns/data-models#methaneConcentration", + "nextActuationDeadline": "https://uri.fiware.org/ns/data-models#nextActuationDeadline", + "nextCleaningDeadline": "https://uri.fiware.org/ns/data-models#nextCleaningDeadline", + "owner": { + "@id": "https://uri.fiware.org/ns/data-models#owner", + "@type": "@id" + }, + "refDevice": { + "@id": "https://uri.fiware.org/ns/data-models#refDevice", + "@type": "@id" + }, + "refWasteContainer": { + "@id": "https://uri.fiware.org/ns/data-models#refWasteContainer", + "@type": "@id" + }, + "refWasteContainerIsle": { + "@id": "https://uri.fiware.org/ns/data-models#refWasteContainerIsle", + "@type": "@id" + }, + "refWasteContainerModel": { + "@id": "https://uri.fiware.org/ns/data-models#refWasteContainerModel", + "@type": "@id" + }, + "regulation": "https://uri.fiware.org/ns/data-models#regulation", + "responsible": "https://uri.fiware.org/ns/data-models#responsible", + "serialNumber": "https://uri.fiware.org/ns/data-models#serialNumber", + "source": "https://uri.fiware.org/ns/data-models#source", + "storedWasteCode": "https://uri.fiware.org/ns/data-models#storedWasteCode", + "storedWasteKind": { + "@id": "https://uri.fiware.org/ns/data-models#storedWasteKind", + "@type": "@id", + "@container": "@list" + }, + "storedWasteOrigin": { + "@id": "https://uri.fiware.org/ns/data-models#storedWasteOrigin", + "@type": "@id", + "@container": "@list" + }, + "temperature": "https://uri.fiware.org/ns/data-models#temperature" + } +} diff --git a/scorpio-broker/Examples/geodataproducer.py b/scorpio-broker/Examples/geodataproducer.py new file mode 100644 index 0000000000000000000000000000000000000000..1cde02b15688c0f38a333e76a7dc1d6c1232ea4f --- /dev/null +++ b/scorpio-broker/Examples/geodataproducer.py @@ -0,0 +1,128 @@ +# Usage: python3 geodataproducer.py config.json (the config supplies 'translate', 'from', 'fromheaders', 'to', 'toheaders' and 'polltime') +import urllib +import json +import time +import urllib.request +import sys + + + class GrowingList(list): + def __setitem__(self, index, value): + if index >= len(self): + self.extend([None]*(index + 1 - len(self))) + list.__setitem__(self, index, value) + +# Translates one source feature into an NGSI-LD entity dict by copying values along the dotted paths defined in the 'translate' mapping of the config file. +def parseContent(content, translate): + #print(content) + MY_VALUE_CHAR = "&&&&" + MY_PREFIX_CHAR = "$$" + result = {} + for value in translate: + key = translate[value] + temp = content + setValue = False + prefix = None + suffix = None + if len(value) < 4 or value[-4:]
!= MY_VALUE_CHAR: + for subs in key.split("."): + if(subs[:2] == MY_PREFIX_CHAR): + prefix = str(subs[2:]) + elif(subs[-2:] == MY_PREFIX_CHAR): + suffix = str(subs[:-2]) + else: + if(subs.isdigit()): + temp = temp[int(subs)] + else: + temp = temp[subs] + if prefix != None: + temp = prefix + str(temp) + key = prefix + str(key) + if suffix != None: + temp = str(temp) + suffix + key = str(key) + suffix + tempresult = result; + translationTarget = value.split(".") + mylength = len(translationTarget) + #print (value) + for i in range(0, mylength): + subs = translationTarget[i] + index = subs + #print(subs) + if (subs.isdigit()): + index = int(subs) + if(i == mylength -2 and translationTarget[i+1] == MY_VALUE_CHAR): + #print("valuechar found") + tempresult[index] = key + break + if(i == mylength -1): + if temp == None: + temp = "Not Available" + tempresult[index] = temp + break + if not index in tempresult: + if translationTarget[i+1].isdigit(): + tempresult[index] = GrowingList() + else: + tempresult[index] = {} + tempresult = tempresult[index] + return result + + +firstRun = True + +with open(str(sys.argv[1])) as json_file: + config = json.load(json_file) + +print(config) +translate = config["translate"] +#print(json.dumps(translate)) +requesthost = config["from"] +requestheaders = config["fromheaders"] +ldhost = config["to"] +ldheader = config["toheaders"] +polltime = config["polltime"] +while(True): + try: + req = urllib.request.Request(requesthost, None, requestheaders) + response = urllib.request.urlopen(req) + data = response.read().decode('utf-8') + content = json.loads(data)['features'] + + for entry in content: + ngsiLdContent = parseContent(entry, translate) + print('--------------------------') + print(json.dumps(ngsiLdContent)) + print('-------------------------') + #if firstRun: + try: + req = urllib.request.Request(ldhost + "/ngsi-ld/v1/entities/", json.dumps(ngsiLdContent).encode('utf-8'), ldheader) + response = urllib.request.urlopen(req) + except Exception as e: + print(e) + id = ngsiLdContent["id"].strip() + del ngsiLdContent["id"] + del ngsiLdContent["type"] + if "name" in ngsiLdContent: + del ngsiLdContent["name"] + #print(ldhost + "/ngsi-ld/v1/entities/" + id + "/attrs") + req = urllib.request.Request(ldhost + "/ngsi-ld/v1/entities/" + id + "/attrs", json.dumps(ngsiLdContent).encode('utf-8'), ldheader, method="PATCH") + response = urllib.request.urlopen(req) +# firstRun = False +# else: +# try: +# id = ngsiLdContent["id"].strip() +# del ngsiLdContent["id"] +# del ngsiLdContent["type"] +# if "name" in ngsiLdContent: +# del ngsiLdContent["name"] +# req = urllib.request.Request(ldhost + "/ngsi-ld/v1/entities/" + id + "/attrs", json.dumps(ngsiLdContent).encode('utf-8'), ldheader,method="PATCH") +# response = urllib.request.urlopen(req) +# except Exception as e: +# #print(e) +# pass + except Exception as e: + print(e) + pass + time.sleep(polltime) + diff --git a/scorpio-broker/Examples/index.json b/scorpio-broker/Examples/index.json new file mode 100644 index 0000000000000000000000000000000000000000..c7873bc4dc40cb45bf32c3ac98d8efce7e29f205 --- /dev/null +++ b/scorpio-broker/Examples/index.json @@ -0,0 +1,30 @@ +{ + "@context": [{ + "numberExample": "http://example.org/numberExample", + "stringExample": "http://example.org/stringExample", + "dateTimeExample": "http://example.org/dateTimeExample", + "dateExample": "http://example.org/dateExample", + "timeExample": "http://example.org/timeExample", + "otherValueExample": "http://example.org/otherValueExample", + "trueExample": 
"http://example.org/trueExample", + "falseExample": "http://example.org/falseExample", + "nullExample": "http://example.org/nullExample", + "uriExample": "http://example.org/uriExample", + "structuredExample1": "http://example.org/structuredExample1", + "structuredExample2": "http://example.org/structuredExample2", + "topLevelExample": "http://example.org/topLevelExample", + "subPropertyExample": "http://example.org/subPropertyExample", + "relationshipExample": "http://example.org/relationshipExample", + "Vehicle": "http://example.org/vehicle/Vehicle", + "brandName": "http://example.org/vehicle/brandName", + "speed": "http://example.org/vehicle/speed", + "isParked": { + "@type": "@id", + "@id": "http://example.org/common/isParked" + }, + "providedBy": { + "@type": "@id", + "@id": "http://example.org/common/providedBy" + } + }] +} diff --git a/scorpio-broker/Examples/old-fiware-context.jsonld b/scorpio-broker/Examples/old-fiware-context.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..fd86b242686f70a90006ce9e7087c0cf77c04a09 --- /dev/null +++ b/scorpio-broker/Examples/old-fiware-context.jsonld @@ -0,0 +1,1879 @@ +{ + "@context": [ + { + "0": "https://uri.fiware.org/ns/datamodels#0", + "1": "https://uri.fiware.org/ns/datamodels#1", + "2": "https://uri.fiware.org/ns/datamodels#2", + "3": "https://uri.fiware.org/ns/datamodels#3", + "37": "https://uri.fiware.org/ns/datamodels#37", + "37,38": "https://uri.fiware.org/ns/datamodels#37,38", + "37,38,39": "https://uri.fiware.org/ns/datamodels#37,38,39", + "37,39": "https://uri.fiware.org/ns/datamodels#37,39", + "38": "https://uri.fiware.org/ns/datamodels#38", + "38,39": "https://uri.fiware.org/ns/datamodels#38,39", + "39": "https://uri.fiware.org/ns/datamodels#39", + "4": "https://uri.fiware.org/ns/datamodels#4", + "5": "https://uri.fiware.org/ns/datamodels#5", + "6": "https://uri.fiware.org/ns/datamodels#6", + "7": "https://uri.fiware.org/ns/datamodels#7", + "AeroAllergenObserved": "https://uri.fiware.org/ns/datamodels/AeroAllergenObserved", + "AgriApp": "https://uri.fiware.org/ns/datamodels/AgriApp", + "AgriCrop": "https://uri.fiware.org/ns/datamodels/AgriCrop", + "AgriFarm": "https://uri.fiware.org/ns/datamodels/AgriFarm", + "AgriGreenhouse": "https://uri.fiware.org/ns/datamodels/AgriGreenhouse", + "AgriParcel": "https://uri.fiware.org/ns/datamodels/AgriParcel", + "AgriParcelOperation": "https://uri.fiware.org/ns/datamodels/AgriParcelOperation", + "AgriParcelRecord": "https://uri.fiware.org/ns/datamodels/AgriParcelRecord", + "AgriPest": "https://uri.fiware.org/ns/datamodels/AgriPest", + "AgriProductType": "https://uri.fiware.org/ns/datamodels/AgriProductType", + "AgriSoil": "https://uri.fiware.org/ns/datamodels/AgriSoil", + "AirQualityObserved": "https://uri.fiware.org/ns/datamodels/AirQualityObserved", + "Alert": "https://uri.fiware.org/ns/datamodels/Alert", + "Animal": "https://uri.fiware.org/ns/datamodels/Animal", + "ArrivalEstimation": "https://uri.fiware.org/ns/datamodels/ArrivalEstimation", + "Beach": "https://uri.fiware.org/ns/datamodels/Beach", + "BikeHireDockingStation": "https://uri.fiware.org/ns/datamodels/BikeHireDockingStation", + "Building": "https://uri.fiware.org/ns/datamodels/Building", + "BuildingOperation": "https://uri.fiware.org/ns/datamodels/BuildingOperation", + "ByBankTransferInAdvance": "http://purl.org/goodrelations/v1/ByBankTransferInAdvance", + "ByInvoice": "http://purl.org/goodrelations/v1/ByInvoice", + "C0": "https://uri.fiware.org/ns/datamodels/C0", + "C1": 
"https://uri.fiware.org/ns/datamodels/C1", + "C2": "https://uri.fiware.org/ns/datamodels/C2", + "CCS/SAE": "https://uri.fiware.org/ns/datamodels/CCS/SAE", + "CHAdeMO": "https://uri.fiware.org/ns/datamodels/CHAdeMO", + "COD": "http://purl.org/goodrelations/v1/COD", + "Caravan_Mains_Socket": "https://uri.fiware.org/ns/datamodels/Caravan_Mains_Socket", + "Cash": "http://purl.org/goodrelations/v1/Cash", + "CheckInAdvance": "http://purl.org/goodrelations/v1/CheckInAdvance", + "Chla": "https://uri.fiware.org/ns/datamodels/Chla", + "Cl-": "https://uri.fiware.org/ns/datamodels/Cl-", + "CrowdFlowObserved": "https://uri.fiware.org/ns/datamodels/CrowdFlowObserved", + "Device": "https://uri.fiware.org/ns/datamodels/Device", + "DeviceModel": "https://uri.fiware.org/ns/datamodels/DeviceModel", + "DirectDebit": "http://purl.org/goodrelations/v1/DirectDebit", + "Dual_CHAdeMO": "https://uri.fiware.org/ns/datamodels/Dual_CHAdeMO", + "Dual_J-1772": "https://uri.fiware.org/ns/datamodels/Dual_J-1772", + "Dual_Mennekes": "https://uri.fiware.org/ns/datamodels/Dual_Mennekes", + "E0": "https://uri.fiware.org/ns/datamodels/E0", + "E1": "https://uri.fiware.org/ns/datamodels/E1", + "E2": "https://uri.fiware.org/ns/datamodels/E2", + "E9": "https://uri.fiware.org/ns/datamodels/E9", + "EVChargingStation": "https://uri.fiware.org/ns/datamodels/EVChargingStation", + "FlowerBed": "https://uri.fiware.org/ns/datamodels/FlowerBed", + "Garden": "https://uri.fiware.org/ns/datamodels/Garden", + "GoogleCheckout": "http://purl.org/goodrelations/v1/GoogleCheckout", + "GreenspaceRecord": "https://uri.fiware.org/ns/datamodels/GreenspaceRecord", + "GtfsAccessPoint": "https://uri.fiware.org/ns/datamodels/GtfsAccessPoint", + "GtfsAgency": "https://uri.fiware.org/ns/datamodels/GtfsAgency", + "GtfsCalendarDateRule": "https://uri.fiware.org/ns/datamodels/GtfsCalendarDateRule", + "GtfsCalendarRule": "https://uri.fiware.org/ns/datamodels/GtfsCalendarRule", + "GtfsFrequency": "https://uri.fiware.org/ns/datamodels/GtfsFrequency", + "GtfsRoute": "https://uri.fiware.org/ns/datamodels/GtfsRoute", + "GtfsService": "https://uri.fiware.org/ns/datamodels/GtfsService", + "GtfsShape": "https://uri.fiware.org/ns/datamodels/GtfsShape", + "GtfsStation": "https://uri.fiware.org/ns/datamodels/GtfsStation", + "GtfsStop": "https://uri.fiware.org/ns/datamodels/GtfsStop", + "GtfsStopTime": "https://uri.fiware.org/ns/datamodels/GtfsStopTime", + "GtfsTransferRule": "https://uri.fiware.org/ns/datamodels/GtfsTransferRule", + "GtfsTrip": "https://uri.fiware.org/ns/datamodels/GtfsTrip", + "HPS": "https://uri.fiware.org/ns/datamodels/HPS", + "J-1772": "https://uri.fiware.org/ns/datamodels/J-1772", + "KeyPerformanceIndicator": "https://uri.fiware.org/ns/datamodels/KeyPerformanceIndicator", + "LED": "https://uri.fiware.org/ns/datamodels/LED", + "LPS": "https://uri.fiware.org/ns/datamodels/LPS", + "Mennekes": "https://uri.fiware.org/ns/datamodels/Mennekes", + "Museum": "https://uri.fiware.org/ns/datamodels/Museum", + "NH3": "https://uri.fiware.org/ns/datamodels/NH3", + "NH4": "https://uri.fiware.org/ns/datamodels/NH4", + "NO3": "https://uri.fiware.org/ns/datamodels/NO3", + "NoiseLevelObserved": "https://uri.fiware.org/ns/datamodels/NoiseLevelObserved", + "O2": "https://uri.fiware.org/ns/datamodels/O2", + "OffStreetParking": "https://uri.fiware.org/ns/datamodels/OffStreetParking", + "OnStreetParking": "https://uri.fiware.org/ns/datamodels/OnStreetParking", + "Open311ServiceRequest": "https://uri.fiware.org/ns/datamodels/Open311ServiceRequest", + "Open311ServiceType": 
"https://uri.fiware.org/ns/datamodels/Open311ServiceType", + "Other": "https://uri.fiware.org/ns/datamodels/Other", + "PC": "https://uri.fiware.org/ns/datamodels/PC", + "PE": "https://uri.fiware.org/ns/datamodels/PE", + "ParkingAccess": "https://uri.fiware.org/ns/datamodels/ParkingAccess", + "ParkingGroup": "https://uri.fiware.org/ns/datamodels/ParkingGroup", + "ParkingSpot": "https://uri.fiware.org/ns/datamodels/ParkingSpot", + "PayPal": "http://purl.org/goodrelations/v1/PayPal", + "PaySwarm": "http://purl.org/goodrelations/v1/PaySwarm", + "Point": "https://uri.fiware.org/ns/datamodels/Point", + "PointOfInterest": "https://uri.fiware.org/ns/datamodels/PointOfInterest", + "Q-Quality": "https://uri.fiware.org/ns/datamodels/Q-Quality", + "Road": "https://uri.fiware.org/ns/datamodels/Road", + "RoadSegment": "https://uri.fiware.org/ns/datamodels/RoadSegment", + "SmartPointOfInteraction": "https://uri.fiware.org/ns/datamodels/SmartPointOfInteraction", + "SmartSpot": "https://uri.fiware.org/ns/datamodels/SmartSpot", + "Streetlight": "https://uri.fiware.org/ns/datamodels/Streetlight", + "StreetlightControlCabinet": "https://uri.fiware.org/ns/datamodels/StreetlightControlCabinet", + "StreetlightGroup": "https://uri.fiware.org/ns/datamodels/StreetlightGroup", + "StreetlightModel": "https://uri.fiware.org/ns/datamodels/StreetlightModel", + "TBD": "https://uri.fiware.org/ns/datamodels/TBD", + "Tesla": "https://uri.fiware.org/ns/datamodels/Tesla", + "ThreePhaseAcMeasurement": "https://uri.fiware.org/ns/datamodels/ThreePhaseAcMeasurement", + "TimeInstant": { + "@id": "https://uri.fiware.org/ns/datamodels/TimeInstant", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "TrafficFlowObserved": "https://uri.fiware.org/ns/datamodels/TrafficFlowObserved", + "Type2": "https://uri.fiware.org/ns/datamodels/Type2", + "Type3": "https://uri.fiware.org/ns/datamodels/Type3", + "UserActivity": "https://uri.fiware.org/ns/datamodels/UserActivity", + "UserContext": "https://uri.fiware.org/ns/datamodels/UserContext", + "Vehicle": "https://uri.fiware.org/ns/datamodels/Vehicle", + "VehicleModel": "https://uri.fiware.org/ns/datamodels/VehicleModel", + "Wall_Euro": "https://uri.fiware.org/ns/datamodels/Wall_Euro", + "WasteContainer": "https://uri.fiware.org/ns/datamodels/WasteContainer", + "WasteContainerIsle": "https://uri.fiware.org/ns/datamodels/WasteContainerIsle", + "WasteContainerModel": "https://uri.fiware.org/ns/datamodels/WasteContainerModel", + "WaterQualityObserved": "https://uri.fiware.org/ns/datamodels/WaterQualityObserved", + "WeatherForecast": "https://uri.fiware.org/ns/datamodels/WeatherForecast", + "WeatherObserved": "https://uri.fiware.org/ns/datamodels/WeatherObserved", + "aborted": "https://uri.fiware.org/ns/datamodels/aborted", + "abs": "https://uri.fiware.org/ns/datamodels/abs", + "acceptedPaymentMethod": { + "@id": "https://uri.fiware.org/ns/datamodels/acceptedPaymentMethod", + "@type": "@vocab" + }, + "accessType": { + "@id": "https://uri.fiware.org/ns/datamodels/accessType", + "@type": "@vocab" + }, + "accessforDisabled": "https://uri.fiware.org/ns/datamodels/accessforDisabled", + "acropolis": "https://uri.fiware.org/ns/datamodels/acropolis", + "actionable": "https://uri.fiware.org/ns/datamodels/actionable", + "active": "https://uri.fiware.org/ns/datamodels/active", + "activeEnergyExport": "https://uri.fiware.org/ns/datamodels/activeEnergyExport", + "activeEnergyImport": "https://uri.fiware.org/ns/datamodels/activeEnergyImport", + "activePower": 
"https://uri.fiware.org/ns/datamodels/activePower", + "activeProgramId": "https://uri.fiware.org/ns/datamodels/activeProgramId", + "activityType": "https://uri.fiware.org/ns/datamodels/activityType", + "actuationHours": "https://uri.fiware.org/ns/datamodels/actuationHours", + "additionalIntervalPrice": "https://uri.fiware.org/ns/datamodels/additionalIntervalPrice", + "address": "https://schema.org/address", + "adequate": "https://uri.fiware.org/ns/datamodels/adequate", + "aggregateRating": "https://schema.org/aggregateRating", + "aggregatedData": "https://uri.fiware.org/ns/datamodels/aggregatedData", + "agriculturalVehicle": "https://uri.fiware.org/ns/datamodels/agriculturalVehicle", + "agriculture": "https://uri.fiware.org/ns/datamodels/agriculture", + "agroVocConcept": "https://uri.fiware.org/ns/datamodels/agroVocConcept", + "airPollution": "https://uri.fiware.org/ns/datamodels/airPollution", + "airQualityIndex": "https://uri.fiware.org/ns/datamodels/airQualityIndex", + "airQualityLevel": "https://uri.fiware.org/ns/datamodels/airQualityLevel", + "airbag": "https://uri.fiware.org/ns/datamodels/airbag", + "airportTerminal": "https://uri.fiware.org/ns/datamodels/airportTerminal", + "alarm": "https://uri.fiware.org/ns/datamodels/alarm", + "alcazaba": "https://uri.fiware.org/ns/datamodels/alcazaba", + "alcazar": "https://uri.fiware.org/ns/datamodels/alcazar", + "alertSource": { + "@id": "https://uri.fiware.org/ns/datamodels/alertSource", + "@type": "@id" + }, + "allergenRisk": { + "@id": "https://uri.fiware.org/ns/datamodels/allergenRisk", + "@type": "@vocab" + }, + "allowedVehicleType": { + "@id": "https://uri.fiware.org/ns/datamodels/allowedVehicleType", + "@type": "@vocab" + }, + "almostEmpty": "https://uri.fiware.org/ns/datamodels/almostEmpty", + "almostFull": "https://uri.fiware.org/ns/datamodels/almostFull", + "alternateName": "https://schema.org/alternateName", + "aluminium": "https://uri.fiware.org/ns/datamodels/aluminium", + "always-ON": "https://uri.fiware.org/ns/datamodels/always-ON", + "ambulance": "https://uri.fiware.org/ns/datamodels/ambulance", + "amperage": "https://uri.fiware.org/ns/datamodels/amperage", + "amphitheatre": "https://uri.fiware.org/ns/datamodels/amphitheatre", + "annotations": "https://uri.fiware.org/ns/datamodels/annotations", + "announcedUrl": "https://uri.fiware.org/ns/datamodels/announcedUrl", + "announcementPeriod": "https://uri.fiware.org/ns/datamodels/announcementPeriod", + "annualPayment": "https://uri.fiware.org/ns/datamodels/annualPayment", + "anyVehicle": "https://uri.fiware.org/ns/datamodels/anyVehicle", + "apartments": "https://uri.fiware.org/ns/datamodels/apartments", + "apparentEnergyExport": "https://uri.fiware.org/ns/datamodels/apparentEnergyExport", + "apparentEnergyImport": "https://uri.fiware.org/ns/datamodels/apparentEnergyImport", + "apparentPower": "https://uri.fiware.org/ns/datamodels/apparentPower", + "applicationUrl": "https://uri.fiware.org/ns/datamodels/applicationUrl", + "appliedArts": "https://uri.fiware.org/ns/datamodels/appliedArts", + "appliesOn": "https://uri.fiware.org/ns/datamodels/appliesOn", + "aqueduct": "https://uri.fiware.org/ns/datamodels/aqueduct", + "arcade": "https://uri.fiware.org/ns/datamodels/arcade", + "arch": "https://uri.fiware.org/ns/datamodels/arch", + "archaeology": "https://uri.fiware.org/ns/datamodels/archaeology", + "archeologicalSite": "https://uri.fiware.org/ns/datamodels/archeologicalSite", + "architecture": "https://uri.fiware.org/ns/datamodels/architecture", + "area": 
"https://uri.fiware.org/ns/datamodels/area", + "areaCovered": "https://uri.fiware.org/ns/datamodels/areaCovered", + "areaSeperatedFromSurroundings": "https://uri.fiware.org/ns/datamodels/areaSeperatedFromSurroundings", + "areaServed": "https://schema.org/areaServed", + "arrivalEstimationUpdate": { + "@id": "https://uri.fiware.org/ns/datamodels/arrivalEstimationUpdate", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "arrivalTime": "https://uri.fiware.org/ns/datamodels/arrivalTime", + "artPeriod": "https://uri.fiware.org/ns/datamodels/artPeriod", + "assault": "https://uri.fiware.org/ns/datamodels/assault", + "asthmaAttack": "https://uri.fiware.org/ns/datamodels/asthmaAttack", + "astronomicalClock": "https://uri.fiware.org/ns/datamodels/astronomicalClock", + "atmosphericPressure": "https://uri.fiware.org/ns/datamodels/atmosphericPressure", + "audioguide": "https://uri.fiware.org/ns/datamodels/audioguide", + "auditory": "https://uri.fiware.org/ns/datamodels/auditory", + "autogas": "https://uri.fiware.org/ns/datamodels/autogas", + "automatedParkingGarage": "https://uri.fiware.org/ns/datamodels/automatedParkingGarage", + "automatic": "https://uri.fiware.org/ns/datamodels/automatic", + "automaticParkingGuidance": "https://uri.fiware.org/ns/datamodels/automaticParkingGuidance", + "auxiliaryServices": "https://uri.fiware.org/ns/datamodels/auxiliaryServices", + "availability": "https://uri.fiware.org/ns/datamodels/availability", + "availableBikeNumber": "https://uri.fiware.org/ns/datamodels/availableBikeNumber", + "availableCapacity": "https://uri.fiware.org/ns/datamodels/availableCapacity", + "availableSpotNumber": "https://uri.fiware.org/ns/datamodels/availableSpotNumber", + "avalancheRisk": "https://uri.fiware.org/ns/datamodels/avalancheRisk", + "averageCrowdSpeed": "https://uri.fiware.org/ns/datamodels/averageCrowdSpeed", + "averageGapDistance": "https://uri.fiware.org/ns/datamodels/averageGapDistance", + "averageHeadwayTime": "https://uri.fiware.org/ns/datamodels/averageHeadwayTime", + "averageSpotLength": "https://uri.fiware.org/ns/datamodels/averageSpotLength", + "averageSpotWidth": "https://uri.fiware.org/ns/datamodels/averageSpotWidth", + "averageVehicleLength": "https://uri.fiware.org/ns/datamodels/averageVehicleLength", + "averageVehicleSpeed": "https://uri.fiware.org/ns/datamodels/averageVehicleSpeed", + "backCamera": "https://uri.fiware.org/ns/datamodels/backCamera", + "backward": "https://uri.fiware.org/ns/datamodels/backward", + "bacteria": "https://uri.fiware.org/ns/datamodels/bacteria", + "bad": "https://uri.fiware.org/ns/datamodels/bad", + "bakehouse": "https://uri.fiware.org/ns/datamodels/bakehouse", + "balancing": "https://uri.fiware.org/ns/datamodels/balancing", + "bandalized": "https://uri.fiware.org/ns/datamodels/bandalized", + "barn": "https://uri.fiware.org/ns/datamodels/barn", + "barrierAccess": "https://uri.fiware.org/ns/datamodels/barrierAccess", + "basilica": "https://uri.fiware.org/ns/datamodels/basilica", + "batteries": "https://uri.fiware.org/ns/datamodels/batteries", + "batteryLevel": "https://uri.fiware.org/ns/datamodels/batteryLevel", + "bbox": { + "@container": "@list", + "@id": "https://purl.org/geojson/vocab#bbox" + }, + "beachType": { + "@id": "https://uri.fiware.org/ns/datamodels/beachType", + "@type": "@vocab" + }, + "belongsTo": { + "@id": "https://uri.fiware.org/ns/datamodels/belongsTo", + "@type": "@id" + }, + "bicycle": "https://uri.fiware.org/ns/datamodels/bicycle", + "bikeParking": "https://uri.fiware.org/ns/datamodels/bikeParking", + 
"bimonthly": "https://uri.fiware.org/ns/datamodels/bimonthly", + "binTrolley": "https://uri.fiware.org/ns/datamodels/binTrolley", + "biodiesel ethanol": "https://uri.fiware.org/ns/datamodels/biodiesel ethanol", + "birthdate": { + "@id": "https://uri.fiware.org/ns/datamodels/birthdate", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "biweekly": "https://uri.fiware.org/ns/datamodels/biweekly", + "blackSand": "https://uri.fiware.org/ns/datamodels/blackSand", + "block": "https://uri.fiware.org/ns/datamodels/block", + "blueFlag": "https://uri.fiware.org/ns/datamodels/blueFlag", + "bluetoothChannel": { + "@id": "https://uri.fiware.org/ns/datamodels/bluetoothChannel", + "@type": "@vocab" + }, + "boat": "https://uri.fiware.org/ns/datamodels/boat", + "bollard": "https://uri.fiware.org/ns/datamodels/bollard", + "bootingUp": "https://uri.fiware.org/ns/datamodels/bootingUp", + "borehole": "https://uri.fiware.org/ns/datamodels/borehole", + "botanical": "https://uri.fiware.org/ns/datamodels/botanical", + "brandName": "https://uri.fiware.org/ns/datamodels/brandName", + "breed": "https://uri.fiware.org/ns/datamodels/breed", + "bridge": "https://uri.fiware.org/ns/datamodels/bridge", + "broken": "https://uri.fiware.org/ns/datamodels/broken", + "brokenLantern": "https://uri.fiware.org/ns/datamodels/brokenLantern", + "buildingFire": "https://uri.fiware.org/ns/datamodels/buildingFire", + "buildingType": { + "@id": "https://uri.fiware.org/ns/datamodels/buildingType", + "@type": "@vocab" + }, + "bullfighting": "https://uri.fiware.org/ns/datamodels/bullfighting", + "bullfightingRing": "https://uri.fiware.org/ns/datamodels/bullfightingRing", + "bumpedPatient": "https://uri.fiware.org/ns/datamodels/bumpedPatient", + "bungalow": "https://uri.fiware.org/ns/datamodels/bungalow", + "bunker": "https://uri.fiware.org/ns/datamodels/bunker", + "burialMound": "https://uri.fiware.org/ns/datamodels/burialMound", + "burning": "https://uri.fiware.org/ns/datamodels/burning", + "bus": "https://uri.fiware.org/ns/datamodels/bus", + "businessTarget": "https://uri.fiware.org/ns/datamodels/businessTarget", + "cabin": "https://uri.fiware.org/ns/datamodels/cabin", + "cableCarStation": "https://uri.fiware.org/ns/datamodels/cableCarStation", + "cafeteria": "https://uri.fiware.org/ns/datamodels/cafeteria", + "calculatedBy": "https://uri.fiware.org/ns/datamodels/calculatedBy", + "calculationFormula": "https://uri.fiware.org/ns/datamodels/calculationFormula", + "calculationFrequency": { + "@id": "https://uri.fiware.org/ns/datamodels/calculationFrequency", + "@type": "@vocab" + }, + "calculationMethod": { + "@id": "https://uri.fiware.org/ns/datamodels/calculationMethod", + "@type": "@vocab" + }, + "calculationPeriod": "https://uri.fiware.org/ns/datamodels/calculationPeriod", + "calmWaters": "https://uri.fiware.org/ns/datamodels/calmWaters", + "calvedBy": { + "@id": "https://uri.fiware.org/ns/datamodels/calvedBy", + "@type": "@id" + }, + "campground": "https://uri.fiware.org/ns/datamodels/campground", + "cancelled": "https://uri.fiware.org/ns/datamodels/cancelled", + "capacity": "https://uri.fiware.org/ns/datamodels/capacity", + "car": "https://uri.fiware.org/ns/datamodels/car", + "carAccident": "https://uri.fiware.org/ns/datamodels/carAccident", + "carSharing": "https://uri.fiware.org/ns/datamodels/carSharing", + "carStopped": "https://uri.fiware.org/ns/datamodels/carStopped", + "carWithCaravan": "https://uri.fiware.org/ns/datamodels/carWithCaravan", + "carWithTrailer": "https://uri.fiware.org/ns/datamodels/carWithTrailer", + 
"carWrongDirection": "https://uri.fiware.org/ns/datamodels/carWrongDirection", + "caravan": "https://uri.fiware.org/ns/datamodels/caravan", + "cargoTransport": "https://uri.fiware.org/ns/datamodels/cargoTransport", + "cargoVolume": "https://uri.fiware.org/ns/datamodels/cargoVolume", + "cargoWeight": "https://uri.fiware.org/ns/datamodels/cargoWeight", + "carport": "https://uri.fiware.org/ns/datamodels/carport", + "carports": "https://uri.fiware.org/ns/datamodels/carports", + "cartuja": "https://uri.fiware.org/ns/datamodels/cartuja", + "cashMachine": "https://uri.fiware.org/ns/datamodels/cashMachine", + "castle": "https://uri.fiware.org/ns/datamodels/castle", + "castro": "https://uri.fiware.org/ns/datamodels/castro", + "catacombs": "https://uri.fiware.org/ns/datamodels/catacombs", + "category": { + "@id": "https://uri.fiware.org/ns/datamodels/category", + "@type": "@vocab" + }, + "cathedral": "https://uri.fiware.org/ns/datamodels/cathedral", + "cathedralMuseum": "https://uri.fiware.org/ns/datamodels/cathedralMuseum", + "cavesAndTouristicMines": "https://uri.fiware.org/ns/datamodels/cavesAndTouristicMines", + "cctv": "https://uri.fiware.org/ns/datamodels/cctv", + "cdom": "https://uri.fiware.org/ns/datamodels/cdom", + "centralIsland": "https://uri.fiware.org/ns/datamodels/centralIsland", + "ceramics": "https://uri.fiware.org/ns/datamodels/ceramics", + "chapel": "https://uri.fiware.org/ns/datamodels/chapel", + "chargeType": { + "@id": "https://uri.fiware.org/ns/datamodels/chargeType", + "@type": "@vocab" + }, + "chinese": "https://uri.fiware.org/ns/datamodels/chinese", + "church": "https://uri.fiware.org/ns/datamodels/church", + "cinema": "https://uri.fiware.org/ns/datamodels/cinema", + "circuit": "https://uri.fiware.org/ns/datamodels/circuit", + "circular": "https://uri.fiware.org/ns/datamodels/circular", + "circus": "https://uri.fiware.org/ns/datamodels/circus", + "civic": "https://uri.fiware.org/ns/datamodels/civic", + "civilDisorder": "https://uri.fiware.org/ns/datamodels/civilDisorder", + "civilEngineering": "https://uri.fiware.org/ns/datamodels/civilEngineering", + "cleaningServices": "https://uri.fiware.org/ns/datamodels/cleaningServices", + "cleaningTrolley": "https://uri.fiware.org/ns/datamodels/cleaningTrolley", + "cloakRoom": "https://uri.fiware.org/ns/datamodels/cloakRoom", + "cloister": "https://uri.fiware.org/ns/datamodels/cloister", + "closed": "https://uri.fiware.org/ns/datamodels/closed", + "closedAbnormal": "https://uri.fiware.org/ns/datamodels/closedAbnormal", + "cng": "https://uri.fiware.org/ns/datamodels/cng", + "co-creation": "https://uri.fiware.org/ns/datamodels/co-creation", + "co2": "https://uri.fiware.org/ns/datamodels/co2", + "coachStation": "https://uri.fiware.org/ns/datamodels/coachStation", + "coastalEvent": "https://uri.fiware.org/ns/datamodels/coastalEvent", + "code": "https://uri.fiware.org/ns/datamodels/code", + "coldWave": "https://uri.fiware.org/ns/datamodels/coldWave", + "color": "https://uri.fiware.org/ns/datamodels/color", + "colorRenderingIndex": "https://uri.fiware.org/ns/datamodels/colorRenderingIndex", + "colorTemperature": "https://uri.fiware.org/ns/datamodels/colorTemperature", + "columnBrandName": "https://uri.fiware.org/ns/datamodels/columnBrandName", + "columnColor": "https://uri.fiware.org/ns/datamodels/columnColor", + "columnIssue": "https://uri.fiware.org/ns/datamodels/columnIssue", + "columnMadeOf": { + "@id": "https://uri.fiware.org/ns/datamodels/columnMadeOf", + "@type": "@vocab" + }, + "columnManufacturerName": 
"https://uri.fiware.org/ns/datamodels/columnManufacturerName", + "columnModelName": "https://uri.fiware.org/ns/datamodels/columnModelName", + "commercial": "https://uri.fiware.org/ns/datamodels/commercial", + "commercial supply": "https://uri.fiware.org/ns/datamodels/commercial supply", + "community": "https://uri.fiware.org/ns/datamodels/community", + "compliantWith": "https://uri.fiware.org/ns/datamodels/compliantWith", + "concrete": "https://uri.fiware.org/ns/datamodels/concrete", + "conductance": "https://uri.fiware.org/ns/datamodels/conductance", + "conductivity": "https://uri.fiware.org/ns/datamodels/conductivity", + "conferenceRoom": "https://uri.fiware.org/ns/datamodels/conferenceRoom", + "configuration": "https://uri.fiware.org/ns/datamodels/configuration", + "congested": "https://uri.fiware.org/ns/datamodels/congested", + "conservatory": "https://uri.fiware.org/ns/datamodels/conservatory", + "construction": "https://uri.fiware.org/ns/datamodels/construction", + "constructionOrMaintenanceVehicle": "https://uri.fiware.org/ns/datamodels/constructionOrMaintenanceVehicle", + "contactPoint": "https://uri.fiware.org/ns/datamodels/contactPoint", + "containedInPlace": "https://uri.fiware.org/ns/datamodels/containedInPlace", + "containerFix": "https://uri.fiware.org/ns/datamodels/containerFix", + "contemporaryArt": "https://uri.fiware.org/ns/datamodels/contemporaryArt", + "controlledAsset": { + "@id": "https://uri.fiware.org/ns/datamodels/controlledAsset", + "@type": "@id" + }, + "controlledProperty": { + "@id": "https://uri.fiware.org/ns/datamodels/controlledProperty", + "@type": "@vocab" + }, + "controllingMethod": { + "@id": "https://uri.fiware.org/ns/datamodels/controllingMethod", + "@type": "@vocab" + }, + "convent": "https://uri.fiware.org/ns/datamodels/convent", + "conventionCentre": "https://uri.fiware.org/ns/datamodels/conventionCentre", + "copyMachineOrService": "https://uri.fiware.org/ns/datamodels/copyMachineOrService", + "cosPhi": "https://uri.fiware.org/ns/datamodels/cosPhi", + "coverageRadius": "https://uri.fiware.org/ns/datamodels/coverageRadius", + "covered": "https://uri.fiware.org/ns/datamodels/covered", + "cow": "https://uri.fiware.org/ns/datamodels/cow", + "cowshed": "https://uri.fiware.org/ns/datamodels/cowshed", + "critical": "https://uri.fiware.org/ns/datamodels/critical", + "cropNutrition": "https://uri.fiware.org/ns/datamodels/cropNutrition", + "cropProtection": "https://uri.fiware.org/ns/datamodels/cropProtection", + "cropStatus": { + "@id": "https://uri.fiware.org/ns/datamodels/cropStatus", + "@type": "@vocab" + }, + "cropVariety": "https://uri.fiware.org/ns/datamodels/cropVariety", + "cupboardMadeOf": { + "@id": "https://uri.fiware.org/ns/datamodels/cupboardMadeOf", + "@type": "@vocab" + }, + "current": "https://uri.fiware.org/ns/datamodels/current", + "currentStanding": { + "@id": "https://uri.fiware.org/ns/datamodels/currentStanding", + "@type": "@vocab" + }, + "daily": "https://uri.fiware.org/ns/datamodels/daily", + "dailyLight": "https://uri.fiware.org/ns/datamodels/dailyLight", + "dataProvider": "https://uri.fiware.org/ns/datamodels/dataProvider", + "dateActivityEnded": { + "@id": "https://uri.fiware.org/ns/datamodels/dateActivityEnded", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateActivityStarted": { + "@id": "https://uri.fiware.org/ns/datamodels/dateActivityStarted", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateCreated": { + "@id": "https://uri.fiware.org/ns/datamodels/dateCreated", + "@type": 
"https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateEnergyMeteringStarted": { + "@id": "https://uri.fiware.org/ns/datamodels/dateEnergyMeteringStarted", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateExpires": { + "@id": "https://uri.fiware.org/ns/datamodels/dateExpires", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateFinished": { + "@id": "https://uri.fiware.org/ns/datamodels/dateFinished", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateFirstUsed": { + "@id": "https://uri.fiware.org/ns/datamodels/dateFirstUsed", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateInstalled": { + "@id": "https://uri.fiware.org/ns/datamodels/dateInstalled", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateIssued": { + "@id": "https://uri.fiware.org/ns/datamodels/dateIssued", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastCalibration": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastCalibration", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastCleaning": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastCleaning", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastEmptying": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastEmptying", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastLampChange": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastLampChange", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastProgramming": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastProgramming", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastSwitchingOff": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastSwitchingOff", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastSwitchingOn": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastSwitchingOn", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastValueReported": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastValueReported", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateLastWatering": { + "@id": "https://uri.fiware.org/ns/datamodels/dateLastWatering", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateManufactured": { + "@id": "https://uri.fiware.org/ns/datamodels/dateManufactured", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateMeteringStarted": { + "@id": "https://uri.fiware.org/ns/datamodels/dateMeteringStarted", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateModified": { + "@id": "https://uri.fiware.org/ns/datamodels/dateModified", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateNextCalculation": "https://uri.fiware.org/ns/datamodels/dateNextCalculation", + "dateObserved": { + "@id": "https://uri.fiware.org/ns/datamodels/dateObserved", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateObservedFrom": { + "@id": "https://uri.fiware.org/ns/datamodels/dateObservedFrom", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateObservedTo": { + "@id": "https://uri.fiware.org/ns/datamodels/dateObservedTo", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateRetrieved": { + "@id": "https://uri.fiware.org/ns/datamodels/dateRetrieved", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateServiceStarted": { + "@id": "https://uri.fiware.org/ns/datamodels/dateServiceStarted", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateStarted": { + "@id": 
"https://uri.fiware.org/ns/datamodels/dateStarted", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "dateVehicleFirstRegistered": "https://uri.fiware.org/ns/datamodels/dateVehicleFirstRegistered", + "day-LOW": "https://uri.fiware.org/ns/datamodels/day-LOW", + "day-OFF": "https://uri.fiware.org/ns/datamodels/day-OFF", + "day-ON": "https://uri.fiware.org/ns/datamodels/day-ON", + "dayMaximum": "https://uri.fiware.org/ns/datamodels/dayMaximum", + "dayMinimum": "https://uri.fiware.org/ns/datamodels/dayMinimum", + "decorativeArts": "https://uri.fiware.org/ns/datamodels/decorativeArts", + "defectiveLamp": "https://uri.fiware.org/ns/datamodels/defectiveLamp", + "defibrillator": "https://uri.fiware.org/ns/datamodels/defibrillator", + "departureTime": "https://uri.fiware.org/ns/datamodels/departureTime", + "depth": "https://uri.fiware.org/ns/datamodels/depth", + "detached": "https://uri.fiware.org/ns/datamodels/detached", + "deviceClass": { + "@id": "https://uri.fiware.org/ns/datamodels/deviceClass", + "@type": "@vocab" + }, + "deviceState": "https://uri.fiware.org/ns/datamodels/deviceState", + "dewPoint": "https://uri.fiware.org/ns/datamodels/dewPoint", + "diesel": "https://uri.fiware.org/ns/datamodels/diesel", + "digester": "https://uri.fiware.org/ns/datamodels/digester", + "diocesanMuseum": "https://uri.fiware.org/ns/datamodels/diocesanMuseum", + "direction": { + "@id": "https://uri.fiware.org/ns/datamodels/direction", + "@type": "@vocab" + }, + "directional": "https://uri.fiware.org/ns/datamodels/directional", + "disabledRamp": "https://uri.fiware.org/ns/datamodels/disabledRamp", + "displacementPowerFactor": "https://uri.fiware.org/ns/datamodels/displacementPowerFactor", + "distanceTravelled": "https://uri.fiware.org/ns/datamodels/distanceTravelled", + "documentation": "https://uri.fiware.org/ns/datamodels/documentation", + "dog": "https://uri.fiware.org/ns/datamodels/dog", + "dolmen": "https://uri.fiware.org/ns/datamodels/dolmen", + "dormitory": "https://uri.fiware.org/ns/datamodels/dormitory", + "drainFlow": "https://uri.fiware.org/ns/datamodels/drainFlow", + "dropOff": "https://uri.fiware.org/ns/datamodels/dropOff", + "dropOffMechanical": "https://uri.fiware.org/ns/datamodels/dropOffMechanical", + "dropOffType": { + "@id": "https://uri.fiware.org/ns/datamodels/dropOffType", + "@type": "@vocab" + }, + "dropOffWithValet": "https://uri.fiware.org/ns/datamodels/dropOffWithValet", + "dropped": "https://uri.fiware.org/ns/datamodels/dropped", + "dumpingStation": "https://uri.fiware.org/ns/datamodels/dumpingStation", + "dumpster": "https://uri.fiware.org/ns/datamodels/dumpster", + "earthquake": "https://uri.fiware.org/ns/datamodels/earthquake", + "echelonParking": "https://uri.fiware.org/ns/datamodels/echelonParking", + "effectiveSince": { + "@id": "https://uri.fiware.org/ns/datamodels/effectiveSince", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "electric": "https://uri.fiware.org/ns/datamodels/electric", + "electricChargingStation": "https://uri.fiware.org/ns/datamodels/electricChargingStation", + "electricityConsumption": "https://uri.fiware.org/ns/datamodels/electricityConsumption", + "electronics": "https://uri.fiware.org/ns/datamodels/electronics", + "elevator": "https://uri.fiware.org/ns/datamodels/elevator", + "elliptic": "https://uri.fiware.org/ns/datamodels/elliptic", + "employeePermit": "https://uri.fiware.org/ns/datamodels/employeePermit", + "empty": "https://uri.fiware.org/ns/datamodels/empty", + "endDate": "https://uri.fiware.org/ns/datamodels/endDate", + 
"endKilometer": "https://uri.fiware.org/ns/datamodels/endKilometer", + "endPoint": "https://uri.fiware.org/ns/datamodels/endPoint", + "endTime": "https://uri.fiware.org/ns/datamodels/endTime", + "endedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/endedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "endpoint": "https://uri.fiware.org/ns/datamodels/endpoint", + "energy": "https://uri.fiware.org/ns/datamodels/energy", + "energyConsumed": "https://uri.fiware.org/ns/datamodels/energyConsumed", + "energyCost": "https://uri.fiware.org/ns/datamodels/energyCost", + "energyLimitationClass": { + "@id": "https://uri.fiware.org/ns/datamodels/energyLimitationClass", + "@type": "@vocab" + }, + "english": "https://uri.fiware.org/ns/datamodels/english", + "entertainment": "https://uri.fiware.org/ns/datamodels/entertainment", + "environment": "https://uri.fiware.org/ns/datamodels/environment", + "ethnology": "https://uri.fiware.org/ns/datamodels/ethnology", + "eventNotification": "https://uri.fiware.org/ns/datamodels/eventNotification", + "eventParking": "https://uri.fiware.org/ns/datamodels/eventParking", + "exactTimes": "https://uri.fiware.org/ns/datamodels/exactTimes", + "exceptionType": { + "@id": "https://uri.fiware.org/ns/datamodels/exceptionType", + "@type": "@vocab" + }, + "exhibitonCentre": "https://uri.fiware.org/ns/datamodels/exhibitonCentre", + "extCategory": "https://uri.fiware.org/ns/datamodels/extCategory", + "externalSecurity": "https://uri.fiware.org/ns/datamodels/externalSecurity", + "extraSpotNumber": "https://uri.fiware.org/ns/datamodels/extraSpotNumber", + "facilities": { + "@id": "https://uri.fiware.org/ns/datamodels/facilities", + "@type": "@vocab" + }, + "failed": "https://uri.fiware.org/ns/datamodels/failed", + "fair": "https://uri.fiware.org/ns/datamodels/fair", + "fairPermit": "https://uri.fiware.org/ns/datamodels/fairPermit", + "fairground": "https://uri.fiware.org/ns/datamodels/fairground", + "fallenPatient": "https://uri.fiware.org/ns/datamodels/fallenPatient", + "falling": "https://uri.fiware.org/ns/datamodels/falling", + "farm": "https://uri.fiware.org/ns/datamodels/farm", + "farm_auxiliary": "https://uri.fiware.org/ns/datamodels/farm_auxiliary", + "faxMachineOrService": "https://uri.fiware.org/ns/datamodels/faxMachineOrService", + "façade": "https://uri.fiware.org/ns/datamodels/façade", + "feature": { + "@id": "https://uri.fiware.org/ns/datamodels/feature", + "@type": "@vocab" + }, + "featuredArtist": { + "@id": "https://uri.fiware.org/ns/datamodels/featuredArtist", + "@type": "@id" + }, + "features": { + "@id": "https://uri.fiware.org/ns/datamodels/features", + "@type": "@vocab" + }, + "fedWith": { + "@id": "https://uri.fiware.org/ns/datamodels/fedWith", + "@type": "@id" + }, + "feeCharged": "https://uri.fiware.org/ns/datamodels/feeCharged", + "feelLikesTemperature": "https://uri.fiware.org/ns/datamodels/feelLikesTemperature", + "female": "https://uri.fiware.org/ns/datamodels/female", + "femaleAdult": "https://uri.fiware.org/ns/datamodels/femaleAdult", + "femaleYoung": "https://uri.fiware.org/ns/datamodels/femaleYoung", + "fenced": "https://uri.fiware.org/ns/datamodels/fenced", + "fencedOff": "https://uri.fiware.org/ns/datamodels/fencedOff", + "fences": "https://uri.fiware.org/ns/datamodels/fences", + "ferryTerminal": "https://uri.fiware.org/ns/datamodels/ferryTerminal", + "fertilisation": "https://uri.fiware.org/ns/datamodels/fertilisation", + "fertiliser": "https://uri.fiware.org/ns/datamodels/fertiliser", + "field": 
"https://uri.fiware.org/ns/datamodels/field", + "fillingLevel": "https://uri.fiware.org/ns/datamodels/fillingLevel", + "financial": "https://uri.fiware.org/ns/datamodels/financial", + "fineArts": "https://uri.fiware.org/ns/datamodels/fineArts", + "finished": "https://uri.fiware.org/ns/datamodels/finished", + "fireBrigade": "https://uri.fiware.org/ns/datamodels/fireBrigade", + "fireExtinguisher": "https://uri.fiware.org/ns/datamodels/fireExtinguisher", + "fireHose": "https://uri.fiware.org/ns/datamodels/fireHose", + "fireHydrant": "https://uri.fiware.org/ns/datamodels/fireHydrant", + "fireRisk": "https://uri.fiware.org/ns/datamodels/fireRisk", + "firmwareVersion": "https://uri.fiware.org/ns/datamodels/firmwareVersion", + "firstAidEquipment": "https://uri.fiware.org/ns/datamodels/firstAidEquipment", + "firstAvailableFloor": "https://uri.fiware.org/ns/datamodels/firstAvailableFloor", + "firstIntervalPrice": "https://uri.fiware.org/ns/datamodels/firstIntervalPrice", + "fishMarket": "https://uri.fiware.org/ns/datamodels/fishMarket", + "fixed": "https://uri.fiware.org/ns/datamodels/fixed", + "flashingBeacon": "https://uri.fiware.org/ns/datamodels/flashingBeacon", + "flat": "https://uri.fiware.org/ns/datamodels/flat", + "fleetVehicleId": "https://uri.fiware.org/ns/datamodels/fleetVehicleId", + "flood": "https://uri.fiware.org/ns/datamodels/flood", + "floodLight": "https://uri.fiware.org/ns/datamodels/floodLight", + "floodRisk": "https://uri.fiware.org/ns/datamodels/floodRisk", + "floorsAboveGround": "https://uri.fiware.org/ns/datamodels/floorsAboveGround", + "floorsBelowGround": "https://uri.fiware.org/ns/datamodels/floorsBelowGround", + "fog": "https://uri.fiware.org/ns/datamodels/fog", + "forBabies": "https://uri.fiware.org/ns/datamodels/forBabies", + "forCustomers": "https://uri.fiware.org/ns/datamodels/forCustomers", + "forDisabled": "https://uri.fiware.org/ns/datamodels/forDisabled", + "forElectricalCharging": "https://uri.fiware.org/ns/datamodels/forElectricalCharging", + "forEmployees": "https://uri.fiware.org/ns/datamodels/forEmployees", + "forMembers": "https://uri.fiware.org/ns/datamodels/forMembers", + "forResidents": "https://uri.fiware.org/ns/datamodels/forResidents", + "forStudents": "https://uri.fiware.org/ns/datamodels/forStudents", + "forVisitors": "https://uri.fiware.org/ns/datamodels/forVisitors", + "forestFire": "https://uri.fiware.org/ns/datamodels/forestFire", + "fortifiedTemple": "https://uri.fiware.org/ns/datamodels/fortifiedTemple", + "fortress": "https://uri.fiware.org/ns/datamodels/fortress", + "forward": "https://uri.fiware.org/ns/datamodels/forward", + "free": "https://uri.fiware.org/ns/datamodels/free", + "freeAccess": "https://uri.fiware.org/ns/datamodels/freeAccess", + "freeSlotNumber": "https://uri.fiware.org/ns/datamodels/freeSlotNumber", + "french": "https://uri.fiware.org/ns/datamodels/french", + "frequencies": "https://uri.fiware.org/ns/datamodels/frequencies", + "frequency": "https://uri.fiware.org/ns/datamodels/frequency", + "freshWater": "https://uri.fiware.org/ns/datamodels/freshWater", + "friday": "https://uri.fiware.org/ns/datamodels/friday", + "fuelConsumption": "https://uri.fiware.org/ns/datamodels/fuelConsumption", + "fuelType": { + "@id": "https://uri.fiware.org/ns/datamodels/fuelType", + "@type": "@vocab" + }, + "full": "https://uri.fiware.org/ns/datamodels/full", + "fullAtEntrance": "https://uri.fiware.org/ns/datamodels/fullAtEntrance", + "function": { + "@id": "https://uri.fiware.org/ns/datamodels/function", + "@type": "@vocab" + }, + "fungus": 
"https://uri.fiware.org/ns/datamodels/fungus", + "garage": "https://uri.fiware.org/ns/datamodels/garage", + "garageBoxes": "https://uri.fiware.org/ns/datamodels/garageBoxes", + "garages": "https://uri.fiware.org/ns/datamodels/garages", + "garbageCollection": "https://uri.fiware.org/ns/datamodels/garbageCollection", + "garbage_shed": "https://uri.fiware.org/ns/datamodels/garbage_shed", + "garden": "https://uri.fiware.org/ns/datamodels/garden", + "gasComsumption": "https://uri.fiware.org/ns/datamodels/gasComsumption", + "gasoline": "https://uri.fiware.org/ns/datamodels/gasoline", + "gate": "https://uri.fiware.org/ns/datamodels/gate", + "gateAccess": "https://uri.fiware.org/ns/datamodels/gateAccess", + "glass": "https://uri.fiware.org/ns/datamodels/glass", + "goat": "https://uri.fiware.org/ns/datamodels/goat", + "good": "https://uri.fiware.org/ns/datamodels/good", + "goodsSelling": "https://uri.fiware.org/ns/datamodels/goodsSelling", + "governmentPermit": "https://uri.fiware.org/ns/datamodels/governmentPermit", + "gps": "https://uri.fiware.org/ns/datamodels/gps", + "grandstand": "https://uri.fiware.org/ns/datamodels/grandstand", + "grave": "https://uri.fiware.org/ns/datamodels/grave", + "graveyard": "https://uri.fiware.org/ns/datamodels/graveyard", + "grazingBaby": "https://uri.fiware.org/ns/datamodels/grazingBaby", + "greenhouse": "https://uri.fiware.org/ns/datamodels/greenhouse", + "ground": "https://uri.fiware.org/ns/datamodels/ground", + "group": "https://uri.fiware.org/ns/datamodels/group", + "growing": "https://uri.fiware.org/ns/datamodels/growing", + "guard24hours": "https://uri.fiware.org/ns/datamodels/guard24hours", + "guarded": "https://uri.fiware.org/ns/datamodels/guarded", + "guidedTour": "https://uri.fiware.org/ns/datamodels/guidedTour", + "hangar": "https://uri.fiware.org/ns/datamodels/hangar", + "hardwareVersion": "https://uri.fiware.org/ns/datamodels/hardwareVersion", + "harvestCommodity": "https://uri.fiware.org/ns/datamodels/harvestCommodity", + "harvestingInterval": "https://uri.fiware.org/ns/datamodels/harvestingInterval", + "hasAccessPoint": "https://uri.fiware.org/ns/datamodels/hasAccessPoint", + "hasAgriCrop": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriCrop", + "@type": "@id" + }, + "hasAgriFertiliser": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriFertiliser", + "@type": "@id" + }, + "hasAgriParcel": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriParcel", + "@type": "@id" + }, + "hasAgriParcelChildren": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriParcelChildren", + "@type": "@id" + }, + "hasAgriParcelParent": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriParcelParent", + "@type": "@id" + }, + "hasAgriPest": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriPest", + "@type": "@id" + }, + "hasAgriProductType": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriProductType", + "@type": "@id" + }, + "hasAgriProductTypeChildren": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriProductTypeChildren", + "@type": "@id" + }, + "hasAgriProductTypeParent": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriProductTypeParent", + "@type": "@id" + }, + "hasAgriSoil": { + "@id": "https://uri.fiware.org/ns/datamodels/hasAgriSoil", + "@type": "@id" + }, + "hasBuilding": { + "@id": "https://uri.fiware.org/ns/datamodels/hasBuilding", + "@type": "@id" + }, + "hasDestination": "https://uri.fiware.org/ns/datamodels/hasDestination", + "hasDevice": { + "@id": "https://uri.fiware.org/ns/datamodels/hasDevice", + 
"@type": "@id" + }, + "hasDevices": { + "@id": "https://uri.fiware.org/ns/datamodels/hasDevices", + "@type": "@id" + }, + "hasOperator": { + "@id": "https://uri.fiware.org/ns/datamodels/hasOperator", + "@type": "@id" + }, + "hasOrigin": "https://uri.fiware.org/ns/datamodels/hasOrigin", + "hasParentStation": "https://uri.fiware.org/ns/datamodels/hasParentStation", + "hasProvider": { + "@id": "https://uri.fiware.org/ns/datamodels/hasProvider", + "@type": "@id" + }, + "hasRoute": "https://uri.fiware.org/ns/datamodels/hasRoute", + "hasService": "https://uri.fiware.org/ns/datamodels/hasService", + "hasShape": "https://uri.fiware.org/ns/datamodels/hasShape", + "hasStop": "https://uri.fiware.org/ns/datamodels/hasStop", + "hasTrip": "https://uri.fiware.org/ns/datamodels/hasTrip", + "hasWaterQualityObserved": { + "@id": "https://uri.fiware.org/ns/datamodels/hasWaterQualityObserved", + "@type": "@id" + }, + "hasWeatherObserved": { + "@id": "https://uri.fiware.org/ns/datamodels/hasWeatherObserved", + "@type": "@id" + }, + "hazardOnRoad": "https://uri.fiware.org/ns/datamodels/hazardOnRoad", + "hazardous": "https://uri.fiware.org/ns/datamodels/hazardous", + "headSign": "https://uri.fiware.org/ns/datamodels/headSign", + "heading": "https://uri.fiware.org/ns/datamodels/heading", + "headsign": "https://uri.fiware.org/ns/datamodels/headsign", + "headwaySeconds": "https://uri.fiware.org/ns/datamodels/headwaySeconds", + "health": "https://uri.fiware.org/ns/datamodels/health", + "healthCondition": { + "@id": "https://uri.fiware.org/ns/datamodels/healthCondition", + "@type": "@vocab" + }, + "healthy": "https://uri.fiware.org/ns/datamodels/healthy", + "heartAttack": "https://uri.fiware.org/ns/datamodels/heartAttack", + "heatWave": "https://uri.fiware.org/ns/datamodels/heatWave", + "hedge": "https://uri.fiware.org/ns/datamodels/hedge", + "height": "https://uri.fiware.org/ns/datamodels/height", + "herb_garden": "https://uri.fiware.org/ns/datamodels/herb_garden", + "hermitage": "https://uri.fiware.org/ns/datamodels/hermitage", + "high": "https://uri.fiware.org/ns/datamodels/high", + "highTemperature": "https://uri.fiware.org/ns/datamodels/highTemperature", + "highest": "https://uri.fiware.org/ns/datamodels/highest", + "highestFloor": "https://uri.fiware.org/ns/datamodels/highestFloor", + "historicalPeriod": "https://uri.fiware.org/ns/datamodels/historicalPeriod", + "history": "https://uri.fiware.org/ns/datamodels/history", + "horse": "https://uri.fiware.org/ns/datamodels/horse", + "hospital": "https://uri.fiware.org/ns/datamodels/hospital", + "hostelry": "https://uri.fiware.org/ns/datamodels/hostelry", + "hotel": "https://uri.fiware.org/ns/datamodels/hotel", + "hourly": "https://uri.fiware.org/ns/datamodels/hourly", + "house": "https://uri.fiware.org/ns/datamodels/house", + "houseBuilding": "https://uri.fiware.org/ns/datamodels/houseBuilding", + "houseboat": "https://uri.fiware.org/ns/datamodels/houseboat", + "household": "https://uri.fiware.org/ns/datamodels/household", + "humidity": "https://uri.fiware.org/ns/datamodels/humidity", + "hurricane": "https://uri.fiware.org/ns/datamodels/hurricane", + "hut": "https://uri.fiware.org/ns/datamodels/hut", + "hybrid electric/diesel": "https://uri.fiware.org/ns/datamodels/hybrid electric/diesel", + "hybrid electric/petrol": "https://uri.fiware.org/ns/datamodels/hybrid electric/petrol", + "hydrogen": "https://uri.fiware.org/ns/datamodels/hydrogen", + "ice": "https://uri.fiware.org/ns/datamodels/ice", + "iceFreeScaffold": 
"https://uri.fiware.org/ns/datamodels/iceFreeScaffold", + "illuminance": "https://uri.fiware.org/ns/datamodels/illuminance", + "illuminanceLevel": "https://uri.fiware.org/ns/datamodels/illuminanceLevel", + "image": "https://schema.org/image", + "inCalf": "https://uri.fiware.org/ns/datamodels/inCalf", + "inHeat": "https://uri.fiware.org/ns/datamodels/inHeat", + "inTreatment": "https://uri.fiware.org/ns/datamodels/inTreatment", + "inactive": "https://uri.fiware.org/ns/datamodels/inactive", + "inbound": "https://uri.fiware.org/ns/datamodels/inbound", + "individual": "https://uri.fiware.org/ns/datamodels/individual", + "individualControl": "https://uri.fiware.org/ns/datamodels/individualControl", + "industrial": "https://uri.fiware.org/ns/datamodels/industrial", + "industrialBuilding": "https://uri.fiware.org/ns/datamodels/industrialBuilding", + "informationPoint": "https://uri.fiware.org/ns/datamodels/informationPoint", + "informational": "https://uri.fiware.org/ns/datamodels/informational", + "infotainment": "https://uri.fiware.org/ns/datamodels/infotainment", + "injuredBiker": "https://uri.fiware.org/ns/datamodels/injuredBiker", + "inorganic": "https://uri.fiware.org/ns/datamodels/inorganic", + "input": "https://uri.fiware.org/ns/datamodels/input", + "insect": "https://uri.fiware.org/ns/datamodels/insect", + "insertHoles": "https://uri.fiware.org/ns/datamodels/insertHoles", + "insertHolesNumber": "https://uri.fiware.org/ns/datamodels/insertHolesNumber", + "inspection": "https://uri.fiware.org/ns/datamodels/inspection", + "intensity": "https://uri.fiware.org/ns/datamodels/intensity", + "internetConnection": "https://uri.fiware.org/ns/datamodels/internetConnection", + "internetWireless": "https://uri.fiware.org/ns/datamodels/internetWireless", + "ipAddress": "https://uri.fiware.org/ns/datamodels/ipAddress", + "irrigation": "https://uri.fiware.org/ns/datamodels/irrigation", + "irrigationRecord": "https://uri.fiware.org/ns/datamodels/irrigationRecord", + "isleId": "https://uri.fiware.org/ns/datamodels/isleId", + "isolated": "https://uri.fiware.org/ns/datamodels/isolated", + "issue": "https://uri.fiware.org/ns/datamodels/issue", + "japanese": "https://uri.fiware.org/ns/datamodels/japanese", + "justBorn": "https://uri.fiware.org/ns/datamodels/justBorn", + "kindergarten": "https://uri.fiware.org/ns/datamodels/kindergarten", + "kiosk": "https://uri.fiware.org/ns/datamodels/kiosk", + "kissAndRide": "https://uri.fiware.org/ns/datamodels/kissAndRide", + "kitchen": "https://uri.fiware.org/ns/datamodels/kitchen", + "kpiValue": "https://uri.fiware.org/ns/datamodels/kpiValue", + "lactatingBaby": "https://uri.fiware.org/ns/datamodels/lactatingBaby", + "lagging": "https://uri.fiware.org/ns/datamodels/lagging", + "lampBrandName": "https://uri.fiware.org/ns/datamodels/lampBrandName", + "lampManufacturerName": "https://uri.fiware.org/ns/datamodels/lampManufacturerName", + "lampModelName": "https://uri.fiware.org/ns/datamodels/lampModelName", + "lampTechnology": { + "@id": "https://uri.fiware.org/ns/datamodels/lampTechnology", + "@type": "@vocab" + }, + "lampWeight": "https://uri.fiware.org/ns/datamodels/lampWeight", + "lamppost": "https://uri.fiware.org/ns/datamodels/lamppost", + "landLocation": "https://uri.fiware.org/ns/datamodels/landLocation", + "laneDirection": { + "@id": "https://uri.fiware.org/ns/datamodels/laneDirection", + "@type": "@vocab" + }, + "laneId": "https://uri.fiware.org/ns/datamodels/laneId", + "laneUsage": { + "@id": "https://uri.fiware.org/ns/datamodels/laneUsage", + "@type": "@vocab" + 
}, + "language": "https://uri.fiware.org/ns/datamodels/language", + "lanternBrandName": "https://uri.fiware.org/ns/datamodels/lanternBrandName", + "lanternManufacturerName": "https://uri.fiware.org/ns/datamodels/lanternManufacturerName", + "lanternModelName": "https://uri.fiware.org/ns/datamodels/lanternModelName", + "lanternWeight": "https://uri.fiware.org/ns/datamodels/lanternWeight", + "lastMeterReading": "https://uri.fiware.org/ns/datamodels/lastMeterReading", + "lastPlantedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/lastPlantedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "lastupdatedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/lastupdatedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "laternHeight": "https://uri.fiware.org/ns/datamodels/laternHeight", + "lawnArea": "https://uri.fiware.org/ns/datamodels/lawnArea", + "layout": { + "@id": "https://uri.fiware.org/ns/datamodels/layout", + "@type": "@vocab" + }, + "leading": "https://uri.fiware.org/ns/datamodels/leading", + "leafRelativeHumidity": "https://uri.fiware.org/ns/datamodels/leafRelativeHumidity", + "leafTemperature": "https://uri.fiware.org/ns/datamodels/leafTemperature", + "leafWetness": "https://uri.fiware.org/ns/datamodels/leafWetness", + "legalId": "https://uri.fiware.org/ns/datamodels/legalId", + "length": "https://uri.fiware.org/ns/datamodels/length", + "levelControl": "https://uri.fiware.org/ns/datamodels/levelControl", + "lid": "https://uri.fiware.org/ns/datamodels/lid", + "lidOpen": "https://uri.fiware.org/ns/datamodels/lidOpen", + "lifeGuard": "https://uri.fiware.org/ns/datamodels/lifeGuard", + "liftshare": "https://uri.fiware.org/ns/datamodels/liftshare", + "light": "https://uri.fiware.org/ns/datamodels/light", + "lightTower": "https://uri.fiware.org/ns/datamodels/lightTower", + "lighting": "https://uri.fiware.org/ns/datamodels/lighting", + "link": "https://uri.fiware.org/ns/datamodels/link", + "literature": "https://uri.fiware.org/ns/datamodels/literature", + "litterBins": "https://uri.fiware.org/ns/datamodels/litterBins", + "loadingBay": "https://uri.fiware.org/ns/datamodels/loadingBay", + "locatedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/locatedAt", + "@type": "@id" + }, + "locationCategory": { + "@id": "https://uri.fiware.org/ns/datamodels/locationCategory", + "@type": "@vocab" + }, + "lockable": "https://uri.fiware.org/ns/datamodels/lockable", + "longTerm": "https://uri.fiware.org/ns/datamodels/longTerm", + "lorry": "https://uri.fiware.org/ns/datamodels/lorry", + "low": "https://uri.fiware.org/ns/datamodels/low", + "lowTemperature": "https://uri.fiware.org/ns/datamodels/lowTemperature", + "lowest": "https://uri.fiware.org/ns/datamodels/lowest", + "lowestFloor": "https://uri.fiware.org/ns/datamodels/lowestFloor", + "lpg": "https://uri.fiware.org/ns/datamodels/lpg", + "luggageLocker": "https://uri.fiware.org/ns/datamodels/luggageLocker", + "luminousFlux": "https://uri.fiware.org/ns/datamodels/luminousFlux", + "macAddress": "https://uri.fiware.org/ns/datamodels/macAddress", + "madeOf": { + "@id": "https://uri.fiware.org/ns/datamodels/madeOf", + "@type": "@vocab" + }, + "madeOfCode": "https://uri.fiware.org/ns/datamodels/madeOfCode", + "maintenance": "https://uri.fiware.org/ns/datamodels/maintenance", + "male": "https://uri.fiware.org/ns/datamodels/male", + "maleAdult": "https://uri.fiware.org/ns/datamodels/maleAdult", + "maleYoung": "https://uri.fiware.org/ns/datamodels/maleYoung", + "mandatory": "https://uri.fiware.org/ns/datamodels/mandatory", + 
"mansion": "https://uri.fiware.org/ns/datamodels/mansion", + "manual": "https://uri.fiware.org/ns/datamodels/manual", + "manufacturerName": "https://uri.fiware.org/ns/datamodels/manufacturerName", + "maritime": "https://uri.fiware.org/ns/datamodels/maritime", + "market": "https://uri.fiware.org/ns/datamodels/market", + "masia": "https://uri.fiware.org/ns/datamodels/masia", + "masiaFortificada": "https://uri.fiware.org/ns/datamodels/masiaFortificada", + "maturing": "https://uri.fiware.org/ns/datamodels/maturing", + "maxPowerConsumption": "https://uri.fiware.org/ns/datamodels/maxPowerConsumption", + "maximum": "https://uri.fiware.org/ns/datamodels/maximum", + "maximumAllowedDuration": "https://uri.fiware.org/ns/datamodels/maximumAllowedDuration", + "maximumAllowedHeight": "https://uri.fiware.org/ns/datamodels/maximumAllowedHeight", + "maximumAllowedSpeed": "https://uri.fiware.org/ns/datamodels/maximumAllowedSpeed", + "maximumAllowedWeight": "https://uri.fiware.org/ns/datamodels/maximumAllowedWeight", + "maximumAllowedWidth": "https://uri.fiware.org/ns/datamodels/maximumAllowedWidth", + "maximumLoad": "https://uri.fiware.org/ns/datamodels/maximumLoad", + "maximumPowerAvailable": "https://uri.fiware.org/ns/datamodels/maximumPowerAvailable", + "measurand": "https://uri.fiware.org/ns/datamodels/measurand", + "measuresPeriod": "https://uri.fiware.org/ns/datamodels/measuresPeriod", + "measuresPeriodUnit": "https://uri.fiware.org/ns/datamodels/measuresPeriodUnit", + "medicineAndPharmacy": "https://uri.fiware.org/ns/datamodels/medicineAndPharmacy", + "medium": "https://uri.fiware.org/ns/datamodels/medium", + "mediumTerm": "https://uri.fiware.org/ns/datamodels/mediumTerm", + "menhir": "https://uri.fiware.org/ns/datamodels/menhir", + "metal": "https://uri.fiware.org/ns/datamodels/metal", + "meterReadingPeriod": "https://uri.fiware.org/ns/datamodels/meterReadingPeriod", + "metering": "https://uri.fiware.org/ns/datamodels/metering", + "methaneConcentration": "https://uri.fiware.org/ns/datamodels/methaneConcentration", + "microbe": "https://uri.fiware.org/ns/datamodels/microbe", + "mileageFromOdometer": "https://uri.fiware.org/ns/datamodels/mileageFromOdometer", + "military": "https://uri.fiware.org/ns/datamodels/military", + "militaryBuilding": "https://uri.fiware.org/ns/datamodels/militaryBuilding", + "minPowerConsumption": "https://uri.fiware.org/ns/datamodels/minPowerConsumption", + "minaret": "https://uri.fiware.org/ns/datamodels/minaret", + "minibus": "https://uri.fiware.org/ns/datamodels/minibus", + "minimum": "https://uri.fiware.org/ns/datamodels/minimum", + "minimumAllowedSpeed": "https://uri.fiware.org/ns/datamodels/minimumAllowedSpeed", + "minimumTransferTime": "https://uri.fiware.org/ns/datamodels/minimumTransferTime", + "mining": "https://uri.fiware.org/ns/datamodels/mining", + "mite": "https://uri.fiware.org/ns/datamodels/mite", + "mnc": "https://uri.fiware.org/ns/datamodels/mnc", + "modelBased": "https://uri.fiware.org/ns/datamodels/modelBased", + "modelName": "https://uri.fiware.org/ns/datamodels/modelName", + "moderate": "https://uri.fiware.org/ns/datamodels/moderate", + "modernArt": "https://uri.fiware.org/ns/datamodels/modernArt", + "monastary": "https://uri.fiware.org/ns/datamodels/monastary", + "monastery": "https://uri.fiware.org/ns/datamodels/monastery", + "monday": "https://uri.fiware.org/ns/datamodels/monday", + "monolith": "https://uri.fiware.org/ns/datamodels/monolith", + "monthly": "https://uri.fiware.org/ns/datamodels/monthly", + "monthlyPayment": 
"https://uri.fiware.org/ns/datamodels/monthlyPayment", + "moped": "https://uri.fiware.org/ns/datamodels/moped", + "mosque": "https://uri.fiware.org/ns/datamodels/mosque", + "motion": "https://uri.fiware.org/ns/datamodels/motion", + "motorcycle": "https://uri.fiware.org/ns/datamodels/motorcycle", + "motorcycleWithSideCar": "https://uri.fiware.org/ns/datamodels/motorcycleWithSideCar", + "motorscooter": "https://uri.fiware.org/ns/datamodels/motorscooter", + "motorway": "https://uri.fiware.org/ns/datamodels/motorway", + "moved": "https://uri.fiware.org/ns/datamodels/moved", + "multiDisciplinar": "https://uri.fiware.org/ns/datamodels/multiDisciplinar", + "multiLevel": "https://uri.fiware.org/ns/datamodels/multiLevel", + "multiStorey": "https://uri.fiware.org/ns/datamodels/multiStorey", + "municipal": "https://uri.fiware.org/ns/datamodels/municipal", + "municipalServices": "https://uri.fiware.org/ns/datamodels/municipalServices", + "museumHouse": "https://uri.fiware.org/ns/datamodels/museumHouse", + "museumType": { + "@id": "https://uri.fiware.org/ns/datamodels/museumType", + "@type": "@vocab" + }, + "music": "https://uri.fiware.org/ns/datamodels/music", + "naturalDisaster": "https://uri.fiware.org/ns/datamodels/naturalDisaster", + "naturalScience": "https://uri.fiware.org/ns/datamodels/naturalScience", + "necropolis": "https://uri.fiware.org/ns/datamodels/necropolis", + "nematodes": "https://uri.fiware.org/ns/datamodels/nematodes", + "nested": "https://uri.fiware.org/ns/datamodels/nested", + "network": "https://uri.fiware.org/ns/datamodels/network", + "nextActuationDeadline": { + "@id": "https://uri.fiware.org/ns/datamodels/nextActuationDeadline", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "nextCleaningDeadline": { + "@id": "https://uri.fiware.org/ns/datamodels/nextCleaningDeadline", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "nextWateringDeadline": { + "@id": "https://uri.fiware.org/ns/datamodels/nextWateringDeadline", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "night-LOW": "https://uri.fiware.org/ns/datamodels/night-LOW", + "night-OFF": "https://uri.fiware.org/ns/datamodels/night-OFF", + "night-ON": "https://uri.fiware.org/ns/datamodels/night-ON", + "noPermitNeeded": "https://uri.fiware.org/ns/datamodels/noPermitNeeded", + "noStatus": "https://uri.fiware.org/ns/datamodels/noStatus", + "nobleHouse": "https://uri.fiware.org/ns/datamodels/nobleHouse", + "noiseLevel": "https://uri.fiware.org/ns/datamodels/noiseLevel", + "nonTracked": "https://uri.fiware.org/ns/datamodels/nonTracked", + "none": "https://uri.fiware.org/ns/datamodels/none", + "notAvailable": "https://uri.fiware.org/ns/datamodels/notAvailable", + "noxiousWeed": "https://uri.fiware.org/ns/datamodels/noxiousWeed", + "numismatic": "https://uri.fiware.org/ns/datamodels/numismatic", + "observedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/observedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "occupancy": "https://uri.fiware.org/ns/datamodels/occupancy", + "occupancyDetectionType": { + "@id": "https://uri.fiware.org/ns/datamodels/occupancyDetectionType", + "@type": "@vocab" + }, + "occupationRate": { + "@id": "https://uri.fiware.org/ns/datamodels/occupationRate", + "@type": "@vocab" + }, + "occupied": "https://uri.fiware.org/ns/datamodels/occupied", + "occupier": { + "@id": "https://uri.fiware.org/ns/datamodels/occupier", + "@type": "@id" + }, + "off": "https://uri.fiware.org/ns/datamodels/off", + "office": "https://uri.fiware.org/ns/datamodels/office", + 
"officeBuilding": "https://uri.fiware.org/ns/datamodels/officeBuilding", + "offstreet": "https://uri.fiware.org/ns/datamodels/offstreet", + "oil": "https://uri.fiware.org/ns/datamodels/oil", + "ok": "https://uri.fiware.org/ns/datamodels/ok", + "on": "https://uri.fiware.org/ns/datamodels/on", + "onDemand": "https://uri.fiware.org/ns/datamodels/onDemand", + "onFoot": "https://uri.fiware.org/ns/datamodels/onFoot", + "onOff": "https://uri.fiware.org/ns/datamodels/onOff", + "onRoute": "https://uri.fiware.org/ns/datamodels/onRoute", + "oneway": "https://uri.fiware.org/ns/datamodels/oneway", + "ongoing": "https://uri.fiware.org/ns/datamodels/ongoing", + "onlyResidents": "https://uri.fiware.org/ns/datamodels/onlyResidents", + "onlyWithPermit": "https://uri.fiware.org/ns/datamodels/onlyWithPermit", + "onstreet": "https://uri.fiware.org/ns/datamodels/onstreet", + "open": "https://uri.fiware.org/ns/datamodels/open", + "openClose": "https://uri.fiware.org/ns/datamodels/openClose", + "openSpace": "https://uri.fiware.org/ns/datamodels/openSpace", + "openingHours": "https://schema.org/openingHours", + "openingHoursSpecification": "https://uri.fiware.org/ns/datamodels/openingHoursSpecification", + "openingTimesInForce": "https://uri.fiware.org/ns/datamodels/openingTimesInForce", + "operatedBy": "https://uri.fiware.org/ns/datamodels/operatedBy", + "operationSequence": "https://uri.fiware.org/ns/datamodels/operationSequence", + "operationType": { + "@id": "https://uri.fiware.org/ns/datamodels/operationType", + "@type": "@vocab" + }, + "operator": "https://uri.fiware.org/ns/datamodels/operator", + "optional": "https://uri.fiware.org/ns/datamodels/optional", + "organic": "https://uri.fiware.org/ns/datamodels/organic", + "organization": "https://uri.fiware.org/ns/datamodels/organization", + "ornamentalLantern": "https://uri.fiware.org/ns/datamodels/ornamentalLantern", + "orp": "https://uri.fiware.org/ns/datamodels/orp", + "osVersion": "https://uri.fiware.org/ns/datamodels/osVersion", + "other": "https://uri.fiware.org/ns/datamodels/other", + "outOfService": "https://uri.fiware.org/ns/datamodels/outOfService", + "outOfServiceSlotNumber": "https://uri.fiware.org/ns/datamodels/outOfServiceSlotNumber", + "outbound": "https://uri.fiware.org/ns/datamodels/outbound", + "output": "https://uri.fiware.org/ns/datamodels/output", + "overnightParking": "https://uri.fiware.org/ns/datamodels/overnightParking", + "overspeed": "https://uri.fiware.org/ns/datamodels/overspeed", + "ownedBy": { + "@id": "https://uri.fiware.org/ns/datamodels/ownedBy", + "@type": "@id" + }, + "owner": { + "@id": "https://uri.fiware.org/ns/datamodels/owner", + "@type": "@id" + }, + "pH": "https://uri.fiware.org/ns/datamodels/pH", + "page": "https://uri.fiware.org/ns/datamodels/page", + "painting": "https://uri.fiware.org/ns/datamodels/painting", + "palace": "https://uri.fiware.org/ns/datamodels/palace", + "paleonthology": "https://uri.fiware.org/ns/datamodels/paleonthology", + "pantheon": "https://uri.fiware.org/ns/datamodels/pantheon", + "paper": "https://uri.fiware.org/ns/datamodels/paper", + "parallelParking": "https://uri.fiware.org/ns/datamodels/parallelParking", + "park": "https://uri.fiware.org/ns/datamodels/park", + "parkAndCycle": "https://uri.fiware.org/ns/datamodels/parkAndCycle", + "parkAndRide": "https://uri.fiware.org/ns/datamodels/parkAndRide", + "parkAndWalk": "https://uri.fiware.org/ns/datamodels/parkAndWalk", + "parked": "https://uri.fiware.org/ns/datamodels/parked", + "parking": "https://uri.fiware.org/ns/datamodels/parking", + 
"parkingGarage": "https://uri.fiware.org/ns/datamodels/parkingGarage", + "parkingLot": "https://uri.fiware.org/ns/datamodels/parkingLot", + "parkingMode": { + "@id": "https://uri.fiware.org/ns/datamodels/parkingMode", + "@type": "@vocab" + }, + "parksAndGardens": "https://uri.fiware.org/ns/datamodels/parksAndGardens", + "partly": "https://uri.fiware.org/ns/datamodels/partly", + "patrolled": "https://uri.fiware.org/ns/datamodels/patrolled", + "pavilion": "https://uri.fiware.org/ns/datamodels/pavilion", + "payDesk": "https://uri.fiware.org/ns/datamodels/payDesk", + "paymentMachine": "https://uri.fiware.org/ns/datamodels/paymentMachine", + "pazo": "https://uri.fiware.org/ns/datamodels/pazo", + "pedestrianPath": "https://uri.fiware.org/ns/datamodels/pedestrianPath", + "peopleCount": "https://uri.fiware.org/ns/datamodels/peopleCount", + "perpendicularParking": "https://uri.fiware.org/ns/datamodels/perpendicularParking", + "pesticide": "https://uri.fiware.org/ns/datamodels/pesticide", + "petrol": "https://uri.fiware.org/ns/datamodels/petrol", + "petrol(leaded)": "https://uri.fiware.org/ns/datamodels/petrol(leaded)", + "petrol(unleaded)": "https://uri.fiware.org/ns/datamodels/petrol(unleaded)", + "phaseToPhaseVoltage": "https://uri.fiware.org/ns/datamodels/phaseToPhaseVoltage", + "phaseVoltage": "https://uri.fiware.org/ns/datamodels/phaseVoltage", + "phenologicalCondition": { + "@id": "https://uri.fiware.org/ns/datamodels/phenologicalCondition", + "@type": "@vocab" + }, + "phone": "https://uri.fiware.org/ns/datamodels/phone", + "pickupType": { + "@id": "https://uri.fiware.org/ns/datamodels/pickupType", + "@type": "@vocab" + }, + "pig": "https://uri.fiware.org/ns/datamodels/pig", + "planned": "https://uri.fiware.org/ns/datamodels/planned", + "plannedEndAt": { + "@id": "https://uri.fiware.org/ns/datamodels/plannedEndAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "plannedStartAt": { + "@id": "https://uri.fiware.org/ns/datamodels/plannedStartAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "plantingFrom": "https://uri.fiware.org/ns/datamodels/plantingFrom", + "plastic": "https://uri.fiware.org/ns/datamodels/plastic", + "playground": "https://uri.fiware.org/ns/datamodels/playground", + "police": "https://uri.fiware.org/ns/datamodels/police", + "pollenConcentration": "https://uri.fiware.org/ns/datamodels/pollenConcentration", + "polularArchitecture": "https://uri.fiware.org/ns/datamodels/polularArchitecture", + "polygon": "https://uri.fiware.org/ns/datamodels/polygon", + "popularArtsAndTraditions": "https://uri.fiware.org/ns/datamodels/popularArtsAndTraditions", + "portable": "https://uri.fiware.org/ns/datamodels/portable", + "postTop": "https://uri.fiware.org/ns/datamodels/postTop", + "pothole": "https://uri.fiware.org/ns/datamodels/pothole", + "power": "https://uri.fiware.org/ns/datamodels/power", + "powerConsumption": "https://uri.fiware.org/ns/datamodels/powerConsumption", + "powerFactor": "https://uri.fiware.org/ns/datamodels/powerFactor", + "powerState": { + "@id": "https://uri.fiware.org/ns/datamodels/powerState", + "@type": "@vocab" + }, + "practical": "https://uri.fiware.org/ns/datamodels/practical", + "precipitation": "https://uri.fiware.org/ns/datamodels/precipitation", + "prehistoric": "https://uri.fiware.org/ns/datamodels/prehistoric", + "prehistoricCave": "https://uri.fiware.org/ns/datamodels/prehistoricCave", + "prehistoricPlace": "https://uri.fiware.org/ns/datamodels/prehistoricPlace", + "pressure": "https://uri.fiware.org/ns/datamodels/pressure", + 
"pressureTendency": { + "@id": "https://uri.fiware.org/ns/datamodels/pressureTendency", + "@type": "@vocab" + }, + "previousLocation": "https://uri.fiware.org/ns/datamodels/previousLocation", + "priceCurrency": "https://uri.fiware.org/ns/datamodels/priceCurrency", + "priceRatePerMinute": "https://uri.fiware.org/ns/datamodels/priceRatePerMinute", + "primary": "https://uri.fiware.org/ns/datamodels/primary", + "private": "https://uri.fiware.org/ns/datamodels/private", + "privateVehicle": "https://uri.fiware.org/ns/datamodels/privateVehicle", + "proCathedral": "https://uri.fiware.org/ns/datamodels/proCathedral", + "process": "https://uri.fiware.org/ns/datamodels/process", + "product": "https://uri.fiware.org/ns/datamodels/product", + "promenade": "https://uri.fiware.org/ns/datamodels/promenade", + "provider": "https://uri.fiware.org/ns/datamodels/provider", + "proximitySensor": "https://uri.fiware.org/ns/datamodels/proximitySensor", + "public": "https://uri.fiware.org/ns/datamodels/public", + "publicBuilding": "https://uri.fiware.org/ns/datamodels/publicBuilding", + "publicPhone": "https://uri.fiware.org/ns/datamodels/publicPhone", + "publicPrivate": "https://uri.fiware.org/ns/datamodels/publicPrivate", + "publicTransport": "https://uri.fiware.org/ns/datamodels/publicTransport", + "publicTransportStation": "https://uri.fiware.org/ns/datamodels/publicTransportStation", + "purchaseDate": { + "@id": "https://uri.fiware.org/ns/datamodels/purchaseDate", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "pyramid": "https://uri.fiware.org/ns/datamodels/pyramid", + "qualitative": "https://uri.fiware.org/ns/datamodels/qualitative", + "quantitative": "https://uri.fiware.org/ns/datamodels/quantitative", + "quantity": "https://uri.fiware.org/ns/datamodels/quantity", + "quarterly": "https://uri.fiware.org/ns/datamodels/quarterly", + "railway": "https://uri.fiware.org/ns/datamodels/railway", + "rainfall": "https://uri.fiware.org/ns/datamodels/rainfall", + "rainwater capture": "https://uri.fiware.org/ns/datamodels/rainwater capture", + "raising": "https://uri.fiware.org/ns/datamodels/raising", + "ramp": "https://uri.fiware.org/ns/datamodels/ramp", + "reactiveEnergyConsumed": "https://uri.fiware.org/ns/datamodels/reactiveEnergyConsumed", + "reactiveEnergyExport": "https://uri.fiware.org/ns/datamodels/reactiveEnergyExport", + "reactiveEnergyImport": "https://uri.fiware.org/ns/datamodels/reactiveEnergyImport", + "reactivePower": "https://uri.fiware.org/ns/datamodels/reactivePower", + "readyForHarvesting": "https://uri.fiware.org/ns/datamodels/readyForHarvesting", + "recommendedLoad": "https://uri.fiware.org/ns/datamodels/recommendedLoad", + "rectangular": "https://uri.fiware.org/ns/datamodels/rectangular", + "refActivity": { + "@id": "https://uri.fiware.org/ns/datamodels/refActivity", + "@type": "@id" + }, + "refAgent": { + "@id": "https://uri.fiware.org/ns/datamodels/refAgent", + "@type": "@id" + }, + "refBuilding": { + "@id": "https://uri.fiware.org/ns/datamodels/refBuilding", + "@type": "@id" + }, + "refDevice": { + "@id": "https://uri.fiware.org/ns/datamodels/refDevice", + "@type": "@id" + }, + "refDeviceModel": { + "@id": "https://uri.fiware.org/ns/datamodels/refDeviceModel", + "@type": "@id" + }, + "refGarden": { + "@id": "https://uri.fiware.org/ns/datamodels/refGarden", + "@type": "@id" + }, + "refGreenspace": { + "@id": "https://uri.fiware.org/ns/datamodels/refGreenspace", + "@type": "@id" + }, + "refGtfsTransitFeedFile": "https://uri.fiware.org/ns/datamodels/refGtfsTransitFeedFile", + "refMap": 
"https://uri.fiware.org/ns/datamodels/refMap", + "refObject": { + "@id": "https://uri.fiware.org/ns/datamodels/refObject", + "@type": "@id" + }, + "refOperator": { + "@id": "https://uri.fiware.org/ns/datamodels/refOperator", + "@type": "@id" + }, + "refParkingAccess": { + "@id": "https://uri.fiware.org/ns/datamodels/refParkingAccess", + "@type": "@id" + }, + "refParkingGroup": { + "@id": "https://uri.fiware.org/ns/datamodels/refParkingGroup", + "@type": "@id" + }, + "refParkingSite": { + "@id": "https://uri.fiware.org/ns/datamodels/refParkingSite", + "@type": "@id" + }, + "refParkingSpot": { + "@id": "https://uri.fiware.org/ns/datamodels/refParkingSpot", + "@type": "@id" + }, + "refPointOfInterest": { + "@id": "https://uri.fiware.org/ns/datamodels/refPointOfInterest", + "@type": "@id" + }, + "refRecord": { + "@id": "https://uri.fiware.org/ns/datamodels/refRecord", + "@type": "@id" + }, + "refRelatedBuildingOperation": { + "@id": "https://uri.fiware.org/ns/datamodels/refRelatedBuildingOperation", + "@type": "@id" + }, + "refRelatedDeviceOperation": { + "@id": "https://uri.fiware.org/ns/datamodels/refRelatedDeviceOperation", + "@type": "@id" + }, + "refRelatedEntity": { + "@id": "https://uri.fiware.org/ns/datamodels/refRelatedEntity", + "@type": "@id" + }, + "refRoad": { + "@id": "https://uri.fiware.org/ns/datamodels/refRoad", + "@type": "@id" + }, + "refRoadSegment": "https://uri.fiware.org/ns/datamodels/refRoadSegment", + "refSeeAlso": { + "@id": "https://uri.fiware.org/ns/datamodels/refSeeAlso", + "@type": "@id" + }, + "refSmartPointOfInteraction": { + "@id": "https://uri.fiware.org/ns/datamodels/refSmartPointOfInteraction", + "@type": "@id" + }, + "refSmartSpot": { + "@id": "https://uri.fiware.org/ns/datamodels/refSmartSpot", + "@type": "@id" + }, + "refStreetlight": { + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlight", + "@type": "@id" + }, + "refStreetlightControlCabinet": { + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlightControlCabinet", + "@type": "@id" + }, + "refStreetlightGroup": { + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlightGroup", + "@type": "@id" + }, + "refStreetlightModel": { + "@id": "https://uri.fiware.org/ns/datamodels/refStreetlightModel", + "@type": "@id" + }, + "refTarget": { + "@id": "https://uri.fiware.org/ns/datamodels/refTarget", + "@type": "@id" + }, + "refTargetDevice": { + "@id": "https://uri.fiware.org/ns/datamodels/refTargetDevice", + "@type": "@id" + }, + "refUser": { + "@id": "https://uri.fiware.org/ns/datamodels/refUser", + "@type": "@id" + }, + "refUserDevice": { + "@id": "https://uri.fiware.org/ns/datamodels/refUserDevice", + "@type": "@id" + }, + "refVehicleModel": { + "@id": "https://uri.fiware.org/ns/datamodels/refVehicleModel", + "@type": "@id" + }, + "refWasteContainer": { + "@id": "https://uri.fiware.org/ns/datamodels/refWasteContainer", + "@type": "@id" + }, + "refWasteContainerIsle": { + "@id": "https://uri.fiware.org/ns/datamodels/refWasteContainerIsle", + "@type": "@id" + }, + "refWasteContainerModel": { + "@id": "https://uri.fiware.org/ns/datamodels/refWasteContainerModel", + "@type": "@id" + }, + "refWeatherObserved": { + "@id": "https://uri.fiware.org/ns/datamodels/refWeatherObserved", + "@type": "@id" + }, + "refuseBin": "https://uri.fiware.org/ns/datamodels/refuseBin", + "regulation": "https://uri.fiware.org/ns/datamodels/regulation", + "relatedSource": { + "@id": "https://uri.fiware.org/ns/datamodels/relatedSource", + "@type": "@id" + }, + "relativeHumidity": 
"https://uri.fiware.org/ns/datamodels/relativeHumidity", + "reliability": "https://uri.fiware.org/ns/datamodels/reliability", + "religiousCentre": "https://uri.fiware.org/ns/datamodels/religiousCentre", + "remainingDistance": "https://uri.fiware.org/ns/datamodels/remainingDistance", + "remainingTime": "https://uri.fiware.org/ns/datamodels/remainingTime", + "reportedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/reportedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "reproductiveCondition": { + "@id": "https://uri.fiware.org/ns/datamodels/reproductiveCondition", + "@type": "@vocab" + }, + "requiredPermit": { + "@id": "https://uri.fiware.org/ns/datamodels/requiredPermit", + "@type": "@vocab" + }, + "reservation": "https://uri.fiware.org/ns/datamodels/reservation", + "reservationType": { + "@id": "https://uri.fiware.org/ns/datamodels/reservationType", + "@type": "@vocab" + }, + "residence": "https://uri.fiware.org/ns/datamodels/residence", + "residentPermit": "https://uri.fiware.org/ns/datamodels/residentPermit", + "residential": "https://uri.fiware.org/ns/datamodels/residential", + "responsible": "https://uri.fiware.org/ns/datamodels/responsible", + "restArea": "https://uri.fiware.org/ns/datamodels/restArea", + "restaurant": "https://uri.fiware.org/ns/datamodels/restaurant", + "result": { + "@id": "https://uri.fiware.org/ns/datamodels/result", + "@type": "@vocab" + }, + "retail": "https://uri.fiware.org/ns/datamodels/retail", + "reversedLane": "https://uri.fiware.org/ns/datamodels/reversedLane", + "riding_hall": "https://uri.fiware.org/ns/datamodels/riding_hall", + "river": "https://uri.fiware.org/ns/datamodels/river", + "road": "https://uri.fiware.org/ns/datamodels/road", + "roadClass": { + "@id": "https://uri.fiware.org/ns/datamodels/roadClass", + "@type": "@vocab" + }, + "roadClosed": "https://uri.fiware.org/ns/datamodels/roadClosed", + "roadSignalling": "https://uri.fiware.org/ns/datamodels/roadSignalling", + "roadWorks": "https://uri.fiware.org/ns/datamodels/roadWorks", + "robbery": "https://uri.fiware.org/ns/datamodels/robbery", + "rodent": "https://uri.fiware.org/ns/datamodels/rodent", + "roof": "https://uri.fiware.org/ns/datamodels/roof", + "rooftop": "https://uri.fiware.org/ns/datamodels/rooftop", + "root": "https://uri.fiware.org/ns/datamodels/root", + "rosarium": "https://uri.fiware.org/ns/datamodels/rosarium", + "roundedLid": "https://uri.fiware.org/ns/datamodels/roundedLid", + "routeColor": "https://uri.fiware.org/ns/datamodels/routeColor", + "routeId": "https://uri.fiware.org/ns/datamodels/routeId", + "routeSortOrder": "https://uri.fiware.org/ns/datamodels/routeSortOrder", + "routeTextColor": "https://uri.fiware.org/ns/datamodels/routeTextColor", + "routeType": { + "@id": "https://uri.fiware.org/ns/datamodels/routeType", + "@type": "@vocab" + }, + "rssi": "https://uri.fiware.org/ns/datamodels/rssi", + "ruins": "https://uri.fiware.org/ns/datamodels/ruins", + "sacredArt": "https://uri.fiware.org/ns/datamodels/sacredArt", + "safeDeposit": "https://uri.fiware.org/ns/datamodels/safeDeposit", + "salinity": "https://uri.fiware.org/ns/datamodels/salinity", + "sanctuary": "https://uri.fiware.org/ns/datamodels/sanctuary", + "saturday": "https://uri.fiware.org/ns/datamodels/saturday", + "scheduled": "https://uri.fiware.org/ns/datamodels/scheduled", + "school": "https://uri.fiware.org/ns/datamodels/school", + "schoolTransportation": "https://uri.fiware.org/ns/datamodels/schoolTransportation", + "scienceAndTechnology": 
"https://uri.fiware.org/ns/datamodels/scienceAndTechnology", + "sculpturalGroups": "https://uri.fiware.org/ns/datamodels/sculpturalGroups", + "sculpture": "https://uri.fiware.org/ns/datamodels/sculpture", + "seasonTicket": "https://uri.fiware.org/ns/datamodels/seasonTicket", + "secondary": "https://uri.fiware.org/ns/datamodels/secondary", + "security": "https://uri.fiware.org/ns/datamodels/security", + "securityStaff": "https://uri.fiware.org/ns/datamodels/securityStaff", + "seeAlso": "https://uri.fiware.org/ns/datamodels/seeAlso", + "seeded": "https://uri.fiware.org/ns/datamodels/seeded", + "semiautomatic": "https://uri.fiware.org/ns/datamodels/semiautomatic", + "seminar": "https://uri.fiware.org/ns/datamodels/seminar", + "sensing": "https://uri.fiware.org/ns/datamodels/sensing", + "serialNumber": "https://uri.fiware.org/ns/datamodels/serialNumber", + "service": "https://uri.fiware.org/ns/datamodels/service", + "serviceArea": "https://uri.fiware.org/ns/datamodels/serviceArea", + "serviceProvided": { + "@id": "https://uri.fiware.org/ns/datamodels/serviceProvided", + "@type": "@vocab" + }, + "serviceStatus": { + "@id": "https://uri.fiware.org/ns/datamodels/serviceStatus", + "@type": "@vocab" + }, + "severity": { + "@id": "https://uri.fiware.org/ns/datamodels/severity", + "@type": "@vocab" + }, + "sex": { + "@id": "https://uri.fiware.org/ns/datamodels/sex", + "@type": "@vocab" + }, + "shape": { + "@id": "https://uri.fiware.org/ns/datamodels/shape", + "@type": "@vocab" + }, + "shed": "https://uri.fiware.org/ns/datamodels/shed", + "sheds": "https://uri.fiware.org/ns/datamodels/sheds", + "sheep": "https://uri.fiware.org/ns/datamodels/sheep", + "shop": "https://uri.fiware.org/ns/datamodels/shop", + "shoppingCentre": "https://uri.fiware.org/ns/datamodels/shoppingCentre", + "shortName": "https://uri.fiware.org/ns/datamodels/shortName", + "shortTerm": "https://uri.fiware.org/ns/datamodels/shortTerm", + "shower": "https://uri.fiware.org/ns/datamodels/shower", + "showers": "https://uri.fiware.org/ns/datamodels/showers", + "shrine": "https://uri.fiware.org/ns/datamodels/shrine", + "sick": "https://uri.fiware.org/ns/datamodels/sick", + "sideEntry": "https://uri.fiware.org/ns/datamodels/sideEntry", + "sidewalk": "https://uri.fiware.org/ns/datamodels/sidewalk", + "signLight": "https://uri.fiware.org/ns/datamodels/signLight", + "signalStrength": { + "@id": "https://uri.fiware.org/ns/datamodels/signalStrength", + "@type": "@vocab" + }, + "singleLevel": "https://uri.fiware.org/ns/datamodels/singleLevel", + "singleSpaceDetection": "https://uri.fiware.org/ns/datamodels/singleSpaceDetection", + "siredBy": { + "@id": "https://uri.fiware.org/ns/datamodels/siredBy", + "@type": "@id" + }, + "skilift": "https://uri.fiware.org/ns/datamodels/skilift", + "smoke": "https://uri.fiware.org/ns/datamodels/smoke", + "snail": "https://uri.fiware.org/ns/datamodels/snail", + "snow": "https://uri.fiware.org/ns/datamodels/snow", + "snow/ice": "https://uri.fiware.org/ns/datamodels/snow/ice", + "snowHeight": "https://uri.fiware.org/ns/datamodels/snowHeight", + "socketNumber": "https://uri.fiware.org/ns/datamodels/socketNumber", + "socketType": { + "@id": "https://uri.fiware.org/ns/datamodels/socketType", + "@type": "@vocab" + }, + "softwareVersion": "https://uri.fiware.org/ns/datamodels/softwareVersion", + "soilMoisture": "https://uri.fiware.org/ns/datamodels/soilMoisture", + "soilMoistureEC": "https://uri.fiware.org/ns/datamodels/soilMoistureEC", + "soilMoistureEc": "https://uri.fiware.org/ns/datamodels/soilMoistureEc", + 
"soilMoistureVwc": "https://uri.fiware.org/ns/datamodels/soilMoistureVwc", + "soilSalinity": "https://uri.fiware.org/ns/datamodels/soilSalinity", + "soilTemperature": "https://uri.fiware.org/ns/datamodels/soilTemperature", + "solarRadiation": "https://uri.fiware.org/ns/datamodels/solarRadiation", + "solarRadiaton": "https://uri.fiware.org/ns/datamodels/solarRadiaton", + "sonometerClass": { + "@id": "https://uri.fiware.org/ns/datamodels/sonometerClass", + "@type": "@vocab" + }, + "source": "https://uri.fiware.org/ns/datamodels/source", + "spacesAvailable": "https://uri.fiware.org/ns/datamodels/spacesAvailable", + "specialLocation": { + "@id": "https://uri.fiware.org/ns/datamodels/specialLocation", + "@type": "@vocab" + }, + "specialTransport": "https://uri.fiware.org/ns/datamodels/specialTransport", + "specialUsage": "https://uri.fiware.org/ns/datamodels/specialUsage", + "specials": "https://uri.fiware.org/ns/datamodels/specials", + "species": { + "@id": "https://uri.fiware.org/ns/datamodels/species", + "@type": "@vocab" + }, + "specificFacility": "https://uri.fiware.org/ns/datamodels/specificFacility", + "specificIdentifiedVehiclePermit": "https://uri.fiware.org/ns/datamodels/specificIdentifiedVehiclePermit", + "speed": "https://uri.fiware.org/ns/datamodels/speed", + "spring": "https://uri.fiware.org/ns/datamodels/spring", + "square": "https://uri.fiware.org/ns/datamodels/square", + "stable": "https://uri.fiware.org/ns/datamodels/stable", + "stadium": "https://uri.fiware.org/ns/datamodels/stadium", + "staffGuidesToSpace": "https://uri.fiware.org/ns/datamodels/staffGuidesToSpace", + "staffed": "https://uri.fiware.org/ns/datamodels/staffed", + "startDate": "https://uri.fiware.org/ns/datamodels/startDate", + "startKilometer": "https://uri.fiware.org/ns/datamodels/startKilometer", + "startPoint": "https://uri.fiware.org/ns/datamodels/startPoint", + "startTime": "https://uri.fiware.org/ns/datamodels/startTime", + "startedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/startedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "static_caravan": "https://uri.fiware.org/ns/datamodels/static_caravan", + "steady": "https://uri.fiware.org/ns/datamodels/steady", + "steel": "https://uri.fiware.org/ns/datamodels/steel", + "stopHeadsign": "https://uri.fiware.org/ns/datamodels/stopHeadsign", + "stopId": "https://uri.fiware.org/ns/datamodels/stopId", + "stopSequence": "https://uri.fiware.org/ns/datamodels/stopSequence", + "storedWasteCode": "https://uri.fiware.org/ns/datamodels/storedWasteCode", + "storedWasteKind": { + "@id": "https://uri.fiware.org/ns/datamodels/storedWasteKind", + "@type": "@vocab" + }, + "storedWasteOrigin": { + "@id": "https://uri.fiware.org/ns/datamodels/storedWasteOrigin", + "@type": "@vocab" + }, + "streamGauge": "https://uri.fiware.org/ns/datamodels/streamGauge", + "streetCleaning": "https://uri.fiware.org/ns/datamodels/streetCleaning", + "streetLighting": "https://uri.fiware.org/ns/datamodels/streetLighting", + "strongWaves": "https://uri.fiware.org/ns/datamodels/strongWaves", + "studentPermit": "https://uri.fiware.org/ns/datamodels/studentPermit", + "sty": "https://uri.fiware.org/ns/datamodels/sty", + "style": { + "@id": "https://uri.fiware.org/ns/datamodels/style", + "@type": "@vocab" + }, + "subCategory": "https://uri.fiware.org/ns/datamodels/subCategory", + "sumptuaryArts": "https://uri.fiware.org/ns/datamodels/sumptuaryArts", + "sunLoungerRental": "https://uri.fiware.org/ns/datamodels/sunLoungerRental", + "sunday": 
"https://uri.fiware.org/ns/datamodels/sunday", + "sunshadeRental": "https://uri.fiware.org/ns/datamodels/sunshadeRental", + "supportedUnits": "https://uri.fiware.org/ns/datamodels/supportedUnits", + "surfPracticeArea": "https://uri.fiware.org/ns/datamodels/surfPracticeArea", + "surface": "https://uri.fiware.org/ns/datamodels/surface", + "suspiciousAction": "https://uri.fiware.org/ns/datamodels/suspiciousAction", + "sweepingMachine": "https://uri.fiware.org/ns/datamodels/sweepingMachine", + "switchingMode": { + "@id": "https://uri.fiware.org/ns/datamodels/switchingMode", + "@type": "@vocab" + }, + "switchingOnHours": "https://uri.fiware.org/ns/datamodels/switchingOnHours", + "synagogue": "https://uri.fiware.org/ns/datamodels/synagogue", + "tanker": "https://uri.fiware.org/ns/datamodels/tanker", + "taulasTalayotsNavetas": "https://uri.fiware.org/ns/datamodels/taulasTalayotsNavetas", + "taxi": "https://uri.fiware.org/ns/datamodels/taxi", + "taxon": "https://uri.fiware.org/ns/datamodels/taxon", + "tds": "https://uri.fiware.org/ns/datamodels/tds", + "telephone": "https://uri.fiware.org/ns/datamodels/telephone", + "temperature": "https://uri.fiware.org/ns/datamodels/temperature", + "temple": "https://uri.fiware.org/ns/datamodels/temple", + "temporaryPrice": "https://uri.fiware.org/ns/datamodels/temporaryPrice", + "terrace": "https://uri.fiware.org/ns/datamodels/terrace", + "tertiary": "https://uri.fiware.org/ns/datamodels/tertiary", + "textile": "https://uri.fiware.org/ns/datamodels/textile", + "thdCurrent": "https://uri.fiware.org/ns/datamodels/thdCurrent", + "thdVoltage": "https://uri.fiware.org/ns/datamodels/thdVoltage", + "thdrIntensity": "https://uri.fiware.org/ns/datamodels/thdrIntensity", + "thdrVoltage": "https://uri.fiware.org/ns/datamodels/thdrVoltage", + "theathre": "https://uri.fiware.org/ns/datamodels/theathre", + "thematic": "https://uri.fiware.org/ns/datamodels/thematic", + "themePark": "https://uri.fiware.org/ns/datamodels/themePark", + "thunderstorms": "https://uri.fiware.org/ns/datamodels/thunderstorms", + "thursday": "https://uri.fiware.org/ns/datamodels/thursday", + "timepoint": { + "@id": "https://uri.fiware.org/ns/datamodels/timepoint", + "@type": "@vocab" + }, + "timezone": "https://uri.fiware.org/ns/datamodels/timezone", + "toilet": "https://uri.fiware.org/ns/datamodels/toilet", + "toilets": "https://uri.fiware.org/ns/datamodels/toilets", + "toll": "https://uri.fiware.org/ns/datamodels/toll", + "tollTerminal": "https://uri.fiware.org/ns/datamodels/tollTerminal", + "tornado": "https://uri.fiware.org/ns/datamodels/tornado", + "totalActiveEnergyExport": "https://uri.fiware.org/ns/datamodels/totalActiveEnergyExport", + "totalActiveEnergyImport": "https://uri.fiware.org/ns/datamodels/totalActiveEnergyImport", + "totalActivePower": "https://uri.fiware.org/ns/datamodels/totalActivePower", + "totalApparentEnergyExport": "https://uri.fiware.org/ns/datamodels/totalApparentEnergyExport", + "totalApparentEnergyImport": "https://uri.fiware.org/ns/datamodels/totalApparentEnergyImport", + "totalApparentPower": "https://uri.fiware.org/ns/datamodels/totalApparentPower", + "totalDisplacementPowerFactor": "https://uri.fiware.org/ns/datamodels/totalDisplacementPowerFactor", + "totalLaneNumber": "https://uri.fiware.org/ns/datamodels/totalLaneNumber", + "totalPowerFactor": "https://uri.fiware.org/ns/datamodels/totalPowerFactor", + "totalReactiveEnergyExport": "https://uri.fiware.org/ns/datamodels/totalReactiveEnergyExport", + "totalReactiveEnergyImport": 
"https://uri.fiware.org/ns/datamodels/totalReactiveEnergyImport", + "totalReactivePower": "https://uri.fiware.org/ns/datamodels/totalReactivePower", + "totalSlotNumber": "https://uri.fiware.org/ns/datamodels/totalSlotNumber", + "totalSpotNumber": "https://uri.fiware.org/ns/datamodels/totalSpotNumber", + "touristArea": "https://uri.fiware.org/ns/datamodels/touristArea", + "touristOffice": "https://uri.fiware.org/ns/datamodels/touristOffice", + "tower": "https://uri.fiware.org/ns/datamodels/tower", + "town": "https://uri.fiware.org/ns/datamodels/town", + "tracked": "https://uri.fiware.org/ns/datamodels/tracked", + "traffic": "https://uri.fiware.org/ns/datamodels/traffic", + "trafficFlow": "https://uri.fiware.org/ns/datamodels/trafficFlow", + "trafficJam": "https://uri.fiware.org/ns/datamodels/trafficJam", + "trailer": "https://uri.fiware.org/ns/datamodels/trailer", + "trainStation": "https://uri.fiware.org/ns/datamodels/trainStation", + "train_station": "https://uri.fiware.org/ns/datamodels/train_station", + "tram": "https://uri.fiware.org/ns/datamodels/tram", + "transferType": { + "@id": "https://uri.fiware.org/ns/datamodels/transferType", + "@type": "@vocab" + }, + "transformer_tower": "https://uri.fiware.org/ns/datamodels/transformer_tower", + "transportation": "https://uri.fiware.org/ns/datamodels/transportation", + "transports": "https://uri.fiware.org/ns/datamodels/transports", + "trashCan": "https://uri.fiware.org/ns/datamodels/trashCan", + "trolley": "https://uri.fiware.org/ns/datamodels/trolley", + "tropicalCyclone": "https://uri.fiware.org/ns/datamodels/tropicalCyclone", + "truck": "https://uri.fiware.org/ns/datamodels/truck", + "truckParking": "https://uri.fiware.org/ns/datamodels/truckParking", + "trunk": "https://uri.fiware.org/ns/datamodels/trunk", + "tss": "https://uri.fiware.org/ns/datamodels/tss", + "tsunami": "https://uri.fiware.org/ns/datamodels/tsunami", + "tuesday": "https://uri.fiware.org/ns/datamodels/tuesday", + "tunnel": "https://uri.fiware.org/ns/datamodels/tunnel", + "turbidity": "https://uri.fiware.org/ns/datamodels/turbidity", + "uVIndexMax": "https://uri.fiware.org/ns/datamodels/uVIndexMax", + "unclassified": "https://uri.fiware.org/ns/datamodels/unclassified", + "underground": "https://uri.fiware.org/ns/datamodels/underground", + "unesco": "https://uri.fiware.org/ns/datamodels/unesco", + "universitary": "https://uri.fiware.org/ns/datamodels/universitary", + "university": "https://uri.fiware.org/ns/datamodels/university", + "unknown": "https://uri.fiware.org/ns/datamodels/unknown", + "updatedAt": { + "@id": "https://uri.fiware.org/ns/datamodels/updatedAt", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "urban": "https://uri.fiware.org/ns/datamodels/urban", + "urbanDeterrentParking": "https://uri.fiware.org/ns/datamodels/urbanDeterrentParking", + "urbanTransit": "https://uri.fiware.org/ns/datamodels/urbanTransit", + "urbanTreeSpot": "https://uri.fiware.org/ns/datamodels/urbanTreeSpot", + "url": "https://uri.fiware.org/ns/datamodels/url", + "usageScenario": { + "@id": "https://uri.fiware.org/ns/datamodels/usageScenario", + "@type": "@vocab" + }, + "validFrom": { + "@id": "https://uri.fiware.org/ns/datamodels/validFrom", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "validTo": { + "@id": "https://uri.fiware.org/ns/datamodels/validTo", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "validity": "https://uri.fiware.org/ns/datamodels/validity", + "van": "https://uri.fiware.org/ns/datamodels/van", + "vehicleConfiguration": 
"https://uri.fiware.org/ns/datamodels/vehicleConfiguration", + "vehicleEngine": "https://uri.fiware.org/ns/datamodels/vehicleEngine", + "vehicleIdentificationNumber": "https://uri.fiware.org/ns/datamodels/vehicleIdentificationNumber", + "vehicleLift": "https://uri.fiware.org/ns/datamodels/vehicleLift", + "vehicleModelDate": { + "@id": "https://uri.fiware.org/ns/datamodels/vehicleModelDate", + "@type": "https://uri.etsi.org/ngsi-ld/DateTime" + }, + "vehicleOnRailTerminal": "https://uri.fiware.org/ns/datamodels/vehicleOnRailTerminal", + "vehiclePlateIdentifier": "https://uri.fiware.org/ns/datamodels/vehiclePlateIdentifier", + "vehicleSpecialUsage": { + "@id": "https://uri.fiware.org/ns/datamodels/vehicleSpecialUsage", + "@type": "@vocab" + }, + "vehicleSubType": "https://uri.fiware.org/ns/datamodels/vehicleSubType", + "vehicleType": { + "@id": "https://uri.fiware.org/ns/datamodels/vehicleType", + "@type": "@vocab" + }, + "vendingMachine": "https://uri.fiware.org/ns/datamodels/vendingMachine", + "version": "https://uri.fiware.org/ns/datamodels/version", + "veryBad": "https://uri.fiware.org/ns/datamodels/veryBad", + "veryGood": "https://uri.fiware.org/ns/datamodels/veryGood", + "veryHigh": "https://uri.fiware.org/ns/datamodels/veryHigh", + "virus": "https://uri.fiware.org/ns/datamodels/virus", + "visitorPermit": "https://uri.fiware.org/ns/datamodels/visitorPermit", + "voltage": "https://uri.fiware.org/ns/datamodels/voltage", + "walledArea": "https://uri.fiware.org/ns/datamodels/walledArea", + "walls": "https://uri.fiware.org/ns/datamodels/walls", + "warehouse": "https://uri.fiware.org/ns/datamodels/warehouse", + "wasteContainerCleaning": "https://uri.fiware.org/ns/datamodels/wasteContainerCleaning", + "wasteDisposal": "https://uri.fiware.org/ns/datamodels/wasteDisposal", + "water": "https://uri.fiware.org/ns/datamodels/water", + "water dam": "https://uri.fiware.org/ns/datamodels/water dam", + "waterConsumption": "https://uri.fiware.org/ns/datamodels/waterConsumption", + "waterCraftRental": "https://uri.fiware.org/ns/datamodels/waterCraftRental", + "waterPollution": "https://uri.fiware.org/ns/datamodels/waterPollution", + "waterSource": { + "@id": "https://uri.fiware.org/ns/datamodels/waterSource", + "@type": "@vocab" + }, + "water_tower": "https://uri.fiware.org/ns/datamodels/water_tower", + "wateringFrequency": { + "@id": "https://uri.fiware.org/ns/datamodels/wateringFrequency", + "@type": "@vocab" + }, + "wax": "https://uri.fiware.org/ns/datamodels/wax", + "weather": "https://uri.fiware.org/ns/datamodels/weather", + "weatherConditions": "https://uri.fiware.org/ns/datamodels/weatherConditions", + "wednesday": "https://uri.fiware.org/ns/datamodels/wednesday", + "weekly": "https://uri.fiware.org/ns/datamodels/weekly", + "weight": "https://uri.fiware.org/ns/datamodels/weight", + "welfareCondition": { + "@id": "https://uri.fiware.org/ns/datamodels/welfareCondition", + "@type": "@vocab" + }, + "wheelChairAccessible": { + "@id": "https://uri.fiware.org/ns/datamodels/wheelChairAccessible", + "@type": "@vocab" + }, + "wheelieBin": "https://uri.fiware.org/ns/datamodels/wheelieBin", + "wheels": "https://uri.fiware.org/ns/datamodels/wheels", + "whiteSand": "https://uri.fiware.org/ns/datamodels/whiteSand", + "width": "https://uri.fiware.org/ns/datamodels/width", + "wifi": "https://uri.fiware.org/ns/datamodels/wifi", + "wind": "https://uri.fiware.org/ns/datamodels/wind", + "windDirection": "https://uri.fiware.org/ns/datamodels/windDirection", + "windSpeed": 
"https://uri.fiware.org/ns/datamodels/windSpeed", + "windy": "https://uri.fiware.org/ns/datamodels/windy", + "withIncidence": "https://uri.fiware.org/ns/datamodels/withIncidence", + "wood": "https://uri.fiware.org/ns/datamodels/wood", + "workOrder": "https://uri.fiware.org/ns/datamodels/workOrder", + "workRecord": "https://uri.fiware.org/ns/datamodels/workRecord", + "working": "https://uri.fiware.org/ns/datamodels/working", + "workingLife": "https://uri.fiware.org/ns/datamodels/workingLife", + "workingMode": { + "@id": "https://uri.fiware.org/ns/datamodels/workingMode", + "@type": "@vocab" + }, + "yearly": "https://uri.fiware.org/ns/datamodels/yearly", + "zen": "https://uri.fiware.org/ns/datamodels/zen", + "zoneCode": "https://uri.fiware.org/ns/datamodels/zoneCode" + }, + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" + ] +} diff --git a/scorpio-broker/Examples/translateheidelbergtunnels.json b/scorpio-broker/Examples/translateheidelbergtunnels.json new file mode 100644 index 0000000000000000000000000000000000000000..7e710e3c720d472a874a4c492aec9607dc7333ac --- /dev/null +++ b/scorpio-broker/Examples/translateheidelbergtunnels.json @@ -0,0 +1,23 @@ +{ + "translate": + { + "id": "$$urn:smartcity:heidelberg:tunel:.id", + "type.&&&&": "https://uri.fiware.org/ns/data-models#tunnel", + "location.type.&&&&": "GeoProperty", + "location.value.coordinates.0": "geometry.coordinates.0", + "location.value.coordinates.1": "geometry.coordinates.1", + "location.value.type": "geometry.type", + "name.value": "properties.name", + "maximumAllowedSpeed.value": "properties.maxspeed", + "maximumAllowedSpeed.type.&&&&": "Property", + "maximumAllowedWidth.value": "properties.maxwidth", + "maximumAllowedWidth.type.&&&&": "Property", + "@context.0.&&&&": "https://schema.lab.fiware.org/ld/context.jsonld" + }, + "from": "http://daten.geonet-mrn.de:8080/geoserver/ows?srsName=EPSG%3A4326&outputFormat=json&service=WFS&srs=EPSG%3A4326&request=GetFeature&typename=geonode%3Atunnels&version=1.0.0", + "fromheaders": {"Accept":"application/json","Content-Type":"application/json"}, + "to": "http://localhost:19090", + "toheaders": {"Content-Type": "Application/ld+json"}, + "polltime": 3600 +} + diff --git a/scorpio-broker/History/.gitignore b/scorpio-broker/History/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..1f31adb1dcec3b9e26bcfdd4ca40a9050e4840e7 --- /dev/null +++ b/scorpio-broker/History/.gitignore @@ -0,0 +1,5 @@ +/target/ +/logs/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/History/HistoryManager/.gitignore b/scorpio-broker/History/HistoryManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..f78df92cfa362483208520832b8a397063bdceba --- /dev/null +++ b/scorpio-broker/History/HistoryManager/.gitignore @@ -0,0 +1,5 @@ +/logs/ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/History/HistoryManager/db-scripts/dml-examples.sql b/scorpio-broker/History/HistoryManager/db-scripts/dml-examples.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e2b9cb8da597cd2ee0b4305251e5ebe553c02c6 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/db-scripts/dml-examples.sql @@ -0,0 +1,515 @@ +begin; + +truncate temporalentity cascade; + +/* +If the NGSI-LD endpoint already knows about this Temporal Representation of an Entity, +because there is an existing Temporal Representation of an Entity whose id (URI) is equivalent, +then all the Attribute instances included by the Temporal 
Representation shall be added to the +existing Entity as mandated by clause 5.6.12. + +The Attribute (considering term expansion rules as mandated by clause 5.5.7) instance(s) shall be added to the target Entity. +For the avoidance of doubt, if no previous Attribute instances existed, then a new Attribute instance collection shall be +created and added to the Entity. +*/ + +-- DML to create "vehicle-context-expanded.jsonld" + +insert into temporalentity (id, type, createdat, modifiedat) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/Vehicle', +'2018-08-01T12:03:00Z', +'2018-08-01T12:03:00Z'); + +-- static without instanceId +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/brandName', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Volvo" + } + ] + } + '); + +-- static with instanceId +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/color', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:91afea8c-23d9-4d6d-9a35-798c28a9db79" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-06-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Red" + } + ] + } + '); + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/speed', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:be664aaf-a7af-4a99-bebc-e89528238abf" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-06-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 120 + } + ] + } +'); + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/speed', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:d3ac28df-977f-4151-a432-dc088f7400d7" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", 
+ "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:05:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 80 + } + ] + } +'); + +-- this instance has sub-properties! +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/speed', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:70ac695b-52a3-4dde-8d29-d2d5a2b662f7" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:00Z" + } + ], + "http://example.org/vehicle/speedAccuracy": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:c6db2da4-c9a3-41da-83c1-a9e05c7ebc9c" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:01Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 5 + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:a6158a85-95e8-4cb2-aadd-bf4b79250884" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:02Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 7 + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 100 + } + ] + } +'); + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'https://uri.etsi.org/ngsi-ld/location', +' { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:cfade4cb-7c71-4135-b69c-24ab83e2afae" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-07-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ 
\"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }" + } + ] + }' +); + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'https://uri.etsi.org/ngsi-ld/location', +' { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:37192165-11d9-48fa-9952-b6aab55e5046" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-07-01T12:05:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 42.2 ] }" + } + ] + } +'); + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'https://uri.etsi.org/ngsi-ld/location', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:0f978241-7d03-4fa9-96e4-094c2c467395" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 43.2 ] }" + } + ] + } +'); + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/temperature', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "http://example.org/vehicle/testedAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-12-04T12:00:00Z" + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 10 + } + ] + } +'); + + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/temperature', +' + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "http://example.org/vehicle/testedAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2019-12-04T12:00:00Z" + } + ] + } + ], + "@type": [ + 
"https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 15 + } + ] + } +'); + +\echo PATCH instance operation +\echo PATCH http://192.168.56.101:9090/ngsi-ld/v1/temporal/entities/urn:ngsi-ld:Vehicle:B9211/attrs/speed/urn:ngsi-ld:95bea784-4539-4212-a7ba-fb4973870b19 + +insert into temporalentityattrinstance (temporalentity_id, attributeid, data) values ( +'urn:ngsi-ld:Vehicle:B9211', +'http://example.org/vehicle/speed', +' + { + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:d3ac28df-977f-4151-a432-dc088f7400d7" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T17:45:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 82 + } + ] + } +') ON CONFLICT(temporalentity_id, attributeid, instanceid) DO UPDATE SET data = EXCLUDED.data; + +commit; + +begin; + +\echo DELETE http://192.168.56.101:9090/ngsi-ld/v1/temporal/entities/urn:ngsi-ld:Vehicle:B9211/attrs/speed +delete from temporalentityattrinstance where temporalentity_id = 'urn:ngsi-ld:Vehicle:B9211' and attributeid = 'http://example.org/vehicle/speed'; + +rollback; -- just testing + diff --git a/scorpio-broker/History/HistoryManager/db-scripts/how-to.txt b/scorpio-broker/History/HistoryManager/db-scripts/how-to.txt new file mode 100644 index 0000000000000000000000000000000000000000..ccf8085b47d9888c4228ec16b80752db3e932f73 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/db-scripts/how-to.txt @@ -0,0 +1,15 @@ +NGB Database how-to for Ubuntu +Tested in Ubuntu 18.04 + +1. Install PostgreSQL 10 +$ sudo apt install postgresql-10 postgresql-client-10 postgresql-server-dev-10 + +2. Install PostGIS +$ sudo apt install postgresql-10-postgis-2.4 postgresql-10-postgis-scripts + +3. Create ngb user +$ sudo su - postgres +$ psql -c "create user ngb superuser createdb password 'ngb'"; + +4. Create ngb database +$ psql -c "create database ngb owner = ngb"; \ No newline at end of file diff --git a/scorpio-broker/History/HistoryManager/db-scripts/ngsild-query-language-mapping.sql b/scorpio-broker/History/HistoryManager/db-scripts/ngsild-query-language-mapping.sql new file mode 100644 index 0000000000000000000000000000000000000000..dc6a69037c28e3d36ab4fe74df1c685dcb2149a6 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/db-scripts/ngsild-query-language-mapping.sql @@ -0,0 +1,299 @@ +-- first release limitation: filters can only be applied to 1st level attributes (no properties of properties, ...) + +/* +The temporal evolution of an NGSI-LD Property shall be represented as an Array of JSON-LD objects, +each one representing an instance of the Property (as mandated by clause 4.5.2) at a particular point in time. + +If a Property is static (i.e. it has not changed over time) then it shall be represented by an Array with a single instance. 
+*/ + +\pset pager 0 +-- \set ECHO queries + +\echo id only +\echo /temporal/entities/urn:ngsi-ld:Vehicle:B9211 +with r as ( + select te.id, te.type, te.createdat, te.modifiedat, coalesce(teai.attributeid, '') as attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata + from temporalentity te + left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) + where -- basic query + te.id = 'urn:ngsi-ld:Vehicle:B130' + group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid + order by te.id, teai.attributeid +) +select id, tedata || case when attrdata <> '{"": [null]}'::jsonb then attrdata else tedata end as data from ( + select id, + ('{"@id":"' || id || '"}')::jsonb || + ('{"@type":["' || type || '"]}')::jsonb || + ('{"https://uri.etsi.org/ngsi-ld/createdAt":[ { "@type": "https://uri.etsi.org/ngsi-ld/DateTime", "@value": "' || createdat || '" }]}')::jsonb || + ('{"https://uri.etsi.org/ngsi-ld/modifiedAt":[ { "@type": "https://uri.etsi.org/ngsi-ld/DateTime", "@value": "' || modifiedat || '" }]}')::jsonb as tedata, + jsonb_object_agg(attributeid, attributedata) as attrdata + from r + group by id, type, createdat, modifiedat + order by modifiedat desc +) as m; + +\echo type + temporal +\echo ?type=Vehicle&timerel=after&time=2018-08-01T00:00:00Z&timeproperty=observedAt +with r as ( + select te.id, te.type, te.createdat, te.modifiedat, coalesce(teai.attributeid, '') as attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata + from temporalentity te + left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) + where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- temporal query + ( (teai.static = true and teai.observedat is null) or -- temporal filters do not apply to static attributes + teai.observedat >= '2018-08-01T00:00:00Z') + group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid + order by te.id, teai.attributeid +) +select tedata || case when attrdata <> '{"": [null]}'::jsonb then attrdata else tedata end as data from ( + select ('{"@id":"' || id || '"}')::jsonb || + ('{"@type":["' || type || '"]}')::jsonb || + ('{"https://uri.etsi.org/ngsi-ld/createdAt":[ { "@type": "https://uri.etsi.org/ngsi-ld/DateTime", "@value": "' || createdat || '" }]}')::jsonb || + ('{"https://uri.etsi.org/ngsi-ld/modifiedAt":[ { "@type": "https://uri.etsi.org/ngsi-ld/DateTime", "@value": "' || modifiedat || '" }]}')::jsonb as tedata, + jsonb_object_agg(attributeid, attributedata) as attrdata + from r + group by id, type, createdat, modifiedat + order by modifiedat desc +) as m; + +\echo type + temporal before +\echo ?type=Vehicle&timerel=before&time=2018-08-01T00:00:00Z&timeproperty=observedAt +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where +-- basic query +te.type = 'http://example.org/vehicle/Vehicle' and +-- temporal query +((teai.static = true and teai.observedat is null) or + teai.observedat <= '2018-08-01T00:00:00Z') +group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid; + + +\echo type + temporal after +\echo ?type=Vehicle&timerel=after&time=2018-08-01T00:00:00Z&timeproperty=observedAt +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left 
join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where +-- basic query +te.type = 'http://example.org/vehicle/Vehicle' and +-- temporal query +((teai.static = true and teai.observedat is null) or + teai.observedat >= '2018-08-01T00:00:00Z') +group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid; + +\echo type + temporal between +\echo ?type=Vehicle&timerel=between&time=2019-08-01T00:00:00Z&endtime==2014-08-01T00:00:00Z&timeproperty=observedAt +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where +-- basic query +te.type = 'http://example.org/vehicle/Vehicle' and +-- temporal query +((teai.static = true and teai.observedat is null) or + teai.observedat between '2014-08-01T00:00:00Z' and '2019-08-01T00:00:00Z') +group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid; + + +\echo attrs + temporal +\echo ?attrs=speed,brandName&timerel=after&time=2018-08-01T00:00:00Z&timeproperty=observedAt +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where +-- attrs query +teai.attributeid in ('http://example.org/vehicle/speed', 'http://example.org/vehicle/brandName') and +-- temporal query +((teai.static = true and teai.observedat is null) or + teai.observedat >= '2018-08-01T00:00:00Z') +group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid; + +\echo type + attrs + temporal +\echo ?type=Vehicle&attrs=speed,brandName&timerel=after&time=2018-08-01T00:00:00Z&timeproperty=observedAt +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- attrs query + teai.attributeid in ('http://example.org/vehicle/speed', 'http://example.org/vehicle/brandName') and + -- temporal query + ((teai.static = true and teai.observedat is null) or + teai.observedat >= '2018-08-01T00:00:00Z') +group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid; + +\echo example from appendix C.5.5.2: +\echo type + attrs + temporal + advanced query +\echo ?type=Vehicle&q=brandName!=Mercedes&attrs=speed,brandName&timerel=between&time=2018-08-01:12:00:00Z&endTime=2018-08-01:13:00:00Z +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where -- basic query (id, type, idPattern) + te.type = 'http://example.org/vehicle/Vehicle' and + -- attrs query + teai.attributeid in ('http://example.org/vehicle/speed', 'http://example.org/vehicle/brandName') and + -- temporal query + ((teai.static = true and teai.observedat is null) or -- temporal filters do not apply to static attributes + teai.observedat >= '2018-08-01T00:00:00Z') and + -- advanced query + (teai.attributeid != 'http://example.org/vehicle/brandName' or + (teai.attributeid = 'http://example.org/vehicle/brandName' and + teai.value <> '"Mercedes"')) +group by te.id, te.type, 
te.createdat, te.modifiedat, teai.attributeid; + +\echo type + temporal + advanced query +\echo ?type=Vehicle&q=brandName!=Mercedes&timerel=after&time=2018-08-01T00:00:00Z&timeproperty=observedAt +select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata +from temporalentity te +left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) +where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- temporal query + ((teai.static = true and teai.observedat is null) or + teai.observedat >= '2018-08-01T00:00:00Z') and + -- advanced query + (teai.attributeid != 'http://example.org/vehicle/brandName' or + (teai.attributeid = 'http://example.org/vehicle/brandName' and + teai.value <> '"Mercedes"')) +group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid; + + +-- If geo-query is present, from S2, select those Entities whose GeoProperty instances meet the geospatial restrictions +-- imposed by the geo-query (as mandated by clause 4.10); those geospatial restrictions shall be checked against the GeoProperty +-- instances that are within the interval defined by the temporal query. Let S3 be this new subset. +\echo type + temporal + geoquery +\echo ?type=Vehicle&timerel=after&time=2018-08-01T00:00:00Z&geoproperty=location&georel=near;maxDistance==360&geometry=Point&coordinates=%5B-8.5%2C43.2%5D +with r as ( + select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata + from temporalentity te + left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) + where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- temporal query + ((teai.static = true and teai.observedat is null) or + teai.observedat >= '2018-08-01T00:00:00Z') + group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid +) +select ('{"@id":"' || id || '"}')::jsonb || + ('{"@type":["' || type || '"]}')::jsonb || + jsonb_object_agg(attributeid, attributedata) as data +from r +--geoquery +where exists ( + select 1 + from temporalentityattrinstance + where temporalentity_id = r.id and + attributeid = 'https://uri.etsi.org/ngsi-ld/location' and + attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' and + ((static = true and observedat is null) or + observedat >= '2018-08-01T00:00:00Z') and + ST_DWithin( + geovalue::geography, + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [ + -8.5,43.2 + ] + }')::geography, + 360 + ) +) +group by id, type; + +\echo type + temporal + custom timeproperty +\echo ?type=Vehicle&timerel=after&time=2019-08-01T00:00:00Z&timeproperty=testedAt +with r as ( + select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata + from temporalentity te + left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) + where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- temporal query + ((teai.static = true and data?'http://example.org/vehicle/testedAt' = false) or + ( + data#>>'{http://example.org/vehicle/testedAt,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@type}' = 'https://uri.etsi.org/ngsi-ld/DateTime' and + (data#>>'{http://example.org/vehicle/testedAt,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::timestamp >= '2019-08-01T00:00:00Z'::timestamp + ) + ) + group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid 
+) +select ('{"@id":"' || id || '"}')::jsonb || + ('{"@type":["' || type || '"]}')::jsonb || + jsonb_object_agg(attributeid, attributedata) as data +from r +group by id, type; + +\echo type + temporal + geoquery + custom timeproperty +\echo ?type=Vehicle&timerel=after&time=2019-08-01T00:00:00Z&timeproperty=testedAt&geoproperty=location&georel=near;maxDistance==360&geometry=Point&coordinates=%5B-8.5%2C43.2%5D +with r as ( + select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata + from temporalentity te + left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) + where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- temporal query + ((teai.static = true and data?'http://example.org/vehicle/testedAt' = false) or + ( + data#>>'{http://example.org/vehicle/testedAt,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@type}' = 'https://uri.etsi.org/ngsi-ld/DateTime' and + (data#>>'{http://example.org/vehicle/testedAt,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::timestamp >= '2019-08-01T00:00:00Z'::timestamp + ) + ) + group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid +) +select ('{"@id":"' || id || '"}')::jsonb || + ('{"@type":["' || type || '"]}')::jsonb || + jsonb_object_agg(attributeid, attributedata) as data +from r +--geoquery +where exists ( + select 1 + from temporalentityattrinstance + where temporalentity_id = r.id and + attributeid = 'https://uri.etsi.org/ngsi-ld/location' and + attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' and + ((static = true and data?'http://example.org/vehicle/testedAt' = false) or + ( + data#>>'{http://example.org/vehicle/testedAt,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@type}' = 'https://uri.etsi.org/ngsi-ld/DateTime' and + (data#>>'{http://example.org/vehicle/testedAt,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::timestamp >= '2019-08-01T00:00:00Z'::timestamp + ) + ) and + ST_DWithin( + geovalue::geography, + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [ + -8.5,43.2 + ] + }')::geography, + 360 + ) +) +group by id, type; + +-- advanced query using jsonb operator +\echo type + temporal + advanced query +\echo ?type=Vehicle&timerel=after&time=2000-08-01T00:00:00Z&q=speed>=100 +with r as ( + select te.id, te.type, te.createdat, te.modifiedat, teai.attributeid, jsonb_agg(teai.data order by teai.modifiedat desc) as attributedata + from temporalentity te + left join temporalentityattrinstance teai on (teai.temporalentity_id = te.id) + where -- basic query + te.type = 'http://example.org/vehicle/Vehicle' and + -- temporal query + ( (teai.static = true and teai.observedat is null) or -- temporal filters do not apply to static attributes + teai.observedat >= '2000-08-01T00:00:00Z') and + -- advanced query + (teai.attributeid = 'http://example.org/vehicle/speed' and + teai.data@>'{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}' and + teai.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' >= '100'::jsonb) + group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid +) +select ('{"@id":"' || id || '"}')::jsonb || + ('{"@type":["' || type || '"]}')::jsonb || + jsonb_object_agg(attributeid, attributedata) as data +from r +group by id, type; diff --git a/scorpio-broker/History/HistoryManager/db-scripts/vehicle-context-expanded.jsonld b/scorpio-broker/History/HistoryManager/db-scripts/vehicle-context-expanded.jsonld new file mode 100644 index 
0000000000000000000000000000000000000000..3c714e4c75cced420c445a062eab5e244df3fb59 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/db-scripts/vehicle-context-expanded.jsonld @@ -0,0 +1,419 @@ +{ + "http://example.org/vehicle/brandName": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Volvo" + } + ] + } + ], + "http://example.org/vehicle/color": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:91afea8c-23d9-4d6d-9a35-798c28a9db79" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-06-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Red" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "@id": "urn:ngsi-ld:Vehicle:B9211", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:cfade4cb-7c71-4135-b69c-24ab83e2afae" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-07-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:37192165-11d9-48fa-9952-b6aab55e5046" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-07-01T12:05:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 42.2 ] }" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:0f978241-7d03-4fa9-96e4-094c2c467395" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": 
"https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 43.2 ] }" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "http://example.org/vehicle/speed": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:be664aaf-a7af-4a99-bebc-e89528238abf" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-06-01T12:03:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 120 + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:d3ac28df-977f-4151-a432-dc088f7400d7" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:05:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 80 + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:70ac695b-52a3-4dde-8d29-d2d5a2b662f7" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:00Z" + } + ], + "http://example.org/vehicle/speedAccuracy": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": "urn:ngsi-ld:c6db2da4-c9a3-41da-83c1-a9e05c7ebc9c" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:01Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 5 + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/instanceId": [ + { + "@id": 
"urn:ngsi-ld:a6158a85-95e8-4cb2-aadd-bf4b79250884" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:07:02Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 7 + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 100 + } + ] + } + ], + "http://example.org/vehicle/temperature": [ + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "http://example.org/vehicle/testedAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-12-04T12:00:00Z" + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 10 + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-08-01T12:03:00Z" + } + ], + "http://example.org/vehicle/testedAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2019-12-04T12:00:00Z" + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 15 + } + ] + } + ], + "@type": [ + "http://example.org/vehicle/Vehicle" + ] +} \ No newline at end of file diff --git a/scorpio-broker/History/HistoryManager/db-scripts/vehicle.jsonld b/scorpio-broker/History/HistoryManager/db-scripts/vehicle.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..6f0ccc78b3fd92ae059b805236421d78ab5104ce --- /dev/null +++ b/scorpio-broker/History/HistoryManager/db-scripts/vehicle.jsonld @@ -0,0 +1,138 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName": "http://example.org/vehicle/brandName", + "color": "http://example.org/vehicle/color", + "speed": "http://example.org/vehicle/speed", + "speedAccuracy": "http://example.org/vehicle/speedAccuracy", + "temperature": "http://example.org/vehicle/temperature", + "testedAt": { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@id": "http://example.org/vehicle/testedAt" + } + } + ], + "id": "urn:ngsi-ld:Vehicle:B9211", + "type": "Vehicle", + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "brandName": [ + { + "type": "Property", + "value": "Volvo", + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z" + } + ], + "color": [ + { + "type": "Property", + "value": "Red", + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-06-01T12:03:00Z", + "instanceId": 
"urn:ngsi-ld:91afea8c-23d9-4d6d-9a35-798c28a9db79" + } + ], + "speed": [ + { + "type": "Property", + "value": 120, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-06-01T12:03:00Z", + "instanceId": "urn:ngsi-ld:be664aaf-a7af-4a99-bebc-e89528238abf" + }, + { + "type": "Property", + "value": 80, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-08-01T12:05:00Z", + "instanceId": "urn:ngsi-ld:d3ac28df-977f-4151-a432-dc088f7400d7" + }, + { + "type": "Property", + "value": 100, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-08-01T12:07:00Z", + "instanceId": "urn:ngsi-ld:70ac695b-52a3-4dde-8d29-d2d5a2b662f7", + "speedAccuracy": [ + { + "type": "Property", + "value": 5, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-08-01T12:07:01Z", + "instanceId": "urn:ngsi-ld:c6db2da4-c9a3-41da-83c1-a9e05c7ebc9c" + }, + { + "type": "Property", + "value": 7, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-08-01T12:07:02Z", + "instanceId": "urn:ngsi-ld:a6158a85-95e8-4cb2-aadd-bf4b79250884" + } + ] + } + ], + "temperature": [ + { + "type": "Property", + "value": 10, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "testedAt": { + "type": "Property", + "value": { + "@type": "DateTime", + "@value": "2018-12-04T12:00:00Z" + } + } + }, + { + "type": "Property", + "value": 15, + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "testedAt": { + "type": "Property", + "value": { + "@type": "DateTime", + "@value": "2019-12-04T12:00:00Z" + } + } + } + ], + "location": [ + { + "type": "GeoProperty", + "value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }", + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-07-01T12:03:00Z", + "instanceId": "urn:ngsi-ld:cfade4cb-7c71-4135-b69c-24ab83e2afae" + }, + { + "type": "GeoProperty", + "value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 42.2 ] }", + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-07-01T12:05:00Z", + "instanceId": "urn:ngsi-ld:37192165-11d9-48fa-9952-b6aab55e5046" + }, + { + "type": "GeoProperty", + "value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 43.2 ] }", + "createdAt": "2018-08-01T12:03:00Z", + "modifiedAt": "2018-08-01T12:03:00Z", + "observedAt": "2018-08-01T12:07:00Z", + "instanceId": "urn:ngsi-ld:0f978241-7d03-4fa9-96e4-094c2c467395" + } + ] + +} \ No newline at end of file diff --git a/scorpio-broker/History/HistoryManager/dockerfile4maven b/scorpio-broker/History/HistoryManager/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/History/HistoryManager/pom.xml b/scorpio-broker/History/HistoryManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..ed025d82d28a0bf3a2f38e1a2a2cfe8eade72b32 
--- /dev/null +++ b/scorpio-broker/History/HistoryManager/pom.xml @@ -0,0 +1,76 @@ + + + 4.0.0 + + HistoryManager + jar + + HistoryManager + 1.0.0-SNAPSHOT + + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.springframework.boot + spring-boot-starter-test + test + + + + org.postgresql + postgresql + + + + diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/HistoryHandler.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/HistoryHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..22c945764621aeab9c42e8c48f01d4ec61f060e3 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/HistoryHandler.java @@ -0,0 +1,66 @@ +package eu.neclab.ngsildbroker.historymanager; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.stream.annotation.EnableBinding; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; + +import eu.neclab.ngsildbroker.commons.ldcontext.AtContextProducerChannel; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.CommonKafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; +import eu.neclab.ngsildbroker.historymanager.config.ProducerChannel; + + +import org.springframework.web.client.RestTemplate; + +@SpringBootApplication +@EnableBinding({ AtContextProducerChannel.class,ProducerChannel.class }) +@Import({CommonKafkaConfig.class, SwaggerConfigDetails.class}) +public class HistoryHandler { + public static void main(String[] args) { + SpringApplication.run(HistoryHandler.class, args); + } + + @Bean + ContextResolverBasic conRes() { + return new ContextResolverBasic(); + } + + @Bean + KafkaOps ops() { + return new KafkaOps(); + } + + @Bean + SecurityConfig securityConfig() { + return new SecurityConfig(); + } + + @Bean + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } + + @Bean + QueryParser queryParser() { + return new QueryParser(); + } + @Bean + ParamsResolver paramsResolver() { + return new ParamsResolver(); + } + + @Bean("hsrestTemp") + RestTemplate restTemp() { + return new RestTemplate(); + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/HistoryManagerResourceConfigurer.java 
b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/HistoryManagerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..81379c6dcd68fe801f2f1a0899aca9300fe0c49a --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/HistoryManagerResourceConfigurer.java @@ -0,0 +1,27 @@ +package eu.neclab.ngsildbroker.historymanager.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. + */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class HistoryManagerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/JPAConfig.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/JPAConfig.java new file mode 100644 index 0000000000000000000000000000000000000000..d88a3af23139e06e74938b7cdef6e6a0222a3139 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/JPAConfig.java @@ -0,0 +1,50 @@ +package eu.neclab.ngsildbroker.historymanager.config; + +//@Configuration +//@EnableTransactionManagement +public class JPAConfig { + + /*@Bean + public LocalContainerEntityManagerFactoryBean entityManagerFactory() { + LocalContainerEntityManagerFactoryBean em = new LocalContainerEntityManagerFactoryBean(); + em.setDataSource(dataSource()); + em.setPackagesToScan(new String[] { "eu.neclab.ngsildbroker.historymanager.repository" }); + + JpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter(); + em.setJpaVendorAdapter(vendorAdapter); + em.setJpaProperties(additionalProperties()); + + return em; + } + + @Bean + public DataSource dataSource() { + DriverManagerDataSource dataSource = new DriverManagerDataSource(); + dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver"); + dataSource.setUrl("jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_historymanager"); + dataSource.setUsername("postgres"); + dataSource.setPassword("postgres"); + return dataSource; + } + + @Bean + public PlatformTransactionManager transactionManager(EntityManagerFactory emf) { + JpaTransactionManager transactionManager = new JpaTransactionManager(); + transactionManager.setEntityManagerFactory(emf); + + return transactionManager; + } + + @Bean + public PersistenceExceptionTranslationPostProcessor exceptionTranslation() { + return new PersistenceExceptionTranslationPostProcessor(); + } + 
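+ // note (disabled config): dataSource() above pairs the MySQL driver class (com.mysql.cj.jdbc.Driver) and a
+ // MySQL dialect (set in additionalProperties() below) with a PostgreSQL JDBC URL; if this block were ever
+ // re-enabled, driver, dialect and URL would have to be aligned (e.g. org.postgresql.Driver with a PostgreSQL dialect).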
+ Properties additionalProperties() { + Properties properties = new Properties(); + properties.setProperty("hibernate.hbm2ddl.auto", "create-drop"); + properties.setProperty("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect"); + + return properties; + }*/ +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/ProducerChannel.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/ProducerChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..298f779f4411d155d5a63114394dc66334137348 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/config/ProducerChannel.java @@ -0,0 +1,15 @@ +package eu.neclab.ngsildbroker.historymanager.config; + +import org.springframework.cloud.stream.annotation.Output; +import org.springframework.messaging.MessageChannel; + +import eu.neclab.ngsildbroker.commons.stream.interfaces.IProducerChannels; + +public interface ProducerChannel extends IProducerChannels { + + public String temporalEntityWriteChannel = "TEMPORAL_ENTITY_WRITE_CHANNEL"; + + @Output(temporalEntityWriteChannel) + MessageChannel temporalEntityWriteChannel(); + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/controller/.HistoryController.java.swp b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/controller/.HistoryController.java.swp new file mode 100644 index 0000000000000000000000000000000000000000..c1695a6db0effa5524ca10501cca6d8ea12b77e2 Binary files /dev/null and b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/controller/.HistoryController.java.swp differ diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryController.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryController.java new file mode 100644 index 0000000000000000000000000000000000000000..16a4f9b2914133b5843b89e2daa994f4050f41e3 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryController.java @@ -0,0 +1,524 @@ +package eu.neclab.ngsildbroker.historymanager.controller; + +import java.net.URI; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + + + +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import 
eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.QueryResult; +import eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.historymanager.repository.HistoryDAO; +import eu.neclab.ngsildbroker.historymanager.service.HistoryService; +import eu.neclab.ngsildbroker.historymanager.utils.Validator; + +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import eu.neclab.ngsildbroker.historymanager.repository.TCSourceDAO; +import org.springframework.beans.factory.annotation.Qualifier; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.Map; +import java.util.Set; +import java.util.HashSet; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.web.client.RestTemplate; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.google.gson.JsonSyntaxException; +import com.google.gson.reflect.TypeToken; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; + +@RestController +@RequestMapping("/ngsi-ld/v1/temporal/entities") +public class HistoryController { + + private final static Logger logger = LoggerFactory.getLogger(HistoryController.class); + + @Autowired + ParamsResolver paramsResolver; + + @Autowired + HistoryDAO historyDAO; + + @Autowired + HistoryService historyService; + @Autowired + ContextResolverBasic contextResolver; + @Value("${atcontext.url}") + String atContextServerUrl; + + @Autowired + @Qualifier("qmtcsourcedao") + TCSourceDAO cSourceDAO; + + @Autowired + @Qualifier("hsrestTemp") + RestTemplate restTemplate; + + @Autowired + ObjectMapper objectMapper; + + private HttpUtils httpUtils; + + @PostConstruct + private void setup() { + this.httpUtils = HttpUtils.getInstance(contextResolver); + } + + @PostMapping + public ResponseEntity createTemporalEntity(HttpServletRequest request, + @RequestBody(required = false) String payload) { + try { + logger.trace("createTemporalEntity :: started"); + Validator.validateTemporalEntity(payload); + + String resolved = httpUtils.expandPayload(request, payload, AppConstants.HISTORY_URL_ID); + + URI uri = historyService.createTemporalEntityFromBinding(resolved); + logger.trace("createTemporalEntity :: completed"); + return ResponseEntity.status(HttpStatus.CREATED).header("Location", uri.toString()).body(uri.toString().getBytes()); + } catch (ResponseException exception) { + logger.error("Exception", exception); + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new 
RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + + /** + * making http call to all discovered csources async. + * + * @param endpointsList + * @param query + * @return List + * @throws InterruptedException + * @throws ExecutionException + * @throws URISyntaxException + * @throws ResponseException + * @throws IOException + */ + private List getDataFromCsources(Set> callablesCollection) + throws ResponseException, Exception { + List allDiscoveredEntities = new ArrayList(); + ExecutorService executorService = Executors.newFixedThreadPool(2); + List> futures = executorService.invokeAll(callablesCollection); + // TODO: why sleep? + // Thread.sleep(5000); + for (Future future : futures) { + logger.trace("future.isDone = " + future.isDone()); + List entitiesList = new ArrayList(); + try { + String response = (String) future.get(); + logger.debug("response from invoke all ::" + response); + if (!("[]").equals(response) && response != null) { + JsonNode jsonNode = objectMapper.readTree(response); + for (int i = 0; i <= jsonNode.size(); i++) { + if (jsonNode.get(i) != null && !jsonNode.isNull()) { + String payload = contextResolver.expand(jsonNode.get(i).toString(), null, true, AppConstants.ENTITIES_URL_ID);// , linkHeaders); + entitiesList.add(payload); + } + } + } + } catch (JsonSyntaxException | ExecutionException e) { + logger.error("Exception ::", e); + } + allDiscoveredEntities.addAll(entitiesList); + } + executorService.shutdown(); + logger.trace("getDataFromCsources() completed ::"); + return allDiscoveredEntities; + } + + public ResponseEntity generateReply(HttpServletRequest request, QueryResult qResult, boolean forceArray) + throws ResponseException { + String nextLink = generateNextLink(request, qResult); + String prevLink = generatePrevLink(request, qResult); + ArrayList additionalLinks = new ArrayList(); + if (nextLink != null) { + additionalLinks.add(nextLink); + } + if (prevLink != null) { + additionalLinks.add(prevLink); + } + + HashMap> additionalHeaders = new HashMap>(); + if (!additionalLinks.isEmpty()) { + additionalHeaders.put(HttpHeaders.LINK, additionalLinks); + } + + return httpUtils.generateReply(request, "[" + String.join(",", qResult.getDataString()) + "]", + additionalHeaders, null, forceArray); + } + + private String generateNextLink(HttpServletRequest request, QueryResult qResult) { + if (qResult.getResultsLeftAfter() == null || qResult.getResultsLeftAfter() <= 0) { + return null; + } + return generateFollowUpLinkHeader(request, qResult.getOffset() + qResult.getLimit(), qResult.getLimit(), + qResult.getqToken(), "next"); + } + + private String generateFollowUpLinkHeader(HttpServletRequest request, int offset, int limit, String token, + String rel) { + + StringBuilder builder = new StringBuilder(" entry : request.getParameterMap().entrySet()) { + String[] values = entry.getValue(); + String key = entry.getKey(); + if (key.equals("offset")) { + continue; + } + if (key.equals("qtoken")) { + continue; + } + if (key.equals("limit")) { + continue; + } + + for (String value : values) { + builder.append(key + "=" + value + "&"); + } + + } + builder.append("offset=" + offset + "&"); + builder.append("limit=" + limit + "&"); + builder.append("qtoken=" + token + ">;rel=\"" + rel + "\""); + return builder.toString(); + } + + private String generatePrevLink(HttpServletRequest request, QueryResult qResult) { + if (qResult.getResultsLeftBefore() == null || qResult.getResultsLeftBefore() <= 0) { + return null; + } + int offset = 
qResult.getOffset() - qResult.getLimit(); + if (offset < 0) { + offset = 0; + } + int limit = qResult.getLimit(); + + return generateFollowUpLinkHeader(request, offset, limit, qResult.getqToken(), "prev"); + } + + @GetMapping + public ResponseEntity retrieveTemporalEntity(HttpServletRequest request) { + String params = request.getQueryString(); + List linkHeaders = HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT); + List aggregatedResult = new ArrayList(); + List realResult; + QueryResult result = new QueryResult(null, null, ErrorType.None, -2, true); + + + //QueryResult result = new QueryResult(null, null, ErrorType.None, -1, true); + try { + logger.info(" Stratos:: retrieving temporal entity"); + logger.trace("retrieveTemporalEntity :: started"); + logger.info("params:" + params); + if (params != null && !Validator.validate(params)) + throw new ResponseException(ErrorType.BadRequestData); + + QueryParams qp = paramsResolver.getQueryParamsFromUriQuery(request.getParameterMap(), + HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT), true); + if (qp == null) // invalid query + throw new ResponseException(ErrorType.InvalidRequest); + if (qp.getTimerel() == null || qp.getTime() == null) { + throw new ResponseException(ErrorType.BadRequestData, "Time filter is required"); + } + if (qp.getType() == null && qp.getAttrs() == null) { + throw new ResponseException(ErrorType.BadRequestData, "Type or attrs is required"); + } + + logger.info("STRATOS IM HERE 2"); + ExecutorService executorService = Executors.newFixedThreadPool(2); + + Future> futureStorageManager = executorService.submit(new Callable>() { + public List call() throws Exception { + logger.trace("Asynchronous Callable storage manager"); + //TAKE CARE OF PAGINATION HERE + if (historyDAO != null) { + + return historyDAO.query(qp); + } else { + //return getFromStorageManager(DataSerializer.toJson(qp)); + return null; + } + //if (queryDAO != null) { + //return queryDAO.query(qp); + //} else { + //return getFromStorageManager(DataSerializer.toJson(qp)); + //} + } + }); + + Future> futureContextRegistry = executorService.submit(new Callable>() { + public List call() throws Exception { + try { + List fromCsources = new ArrayList(); + logger.info("STRATOS IM HERE 2"); + logger.trace("Asynchronous 1 context registry"); + List brokerList; + if (cSourceDAO != null) { + brokerList = cSourceDAO.queryExternalCsources(qp); + } else { + //brokerList = getFromContextRegistry(DataSerializer.toJson(qp)); + brokerList = cSourceDAO.queryExternalCsources(qp); + } + for (String brokerInfo : brokerList) { + logger.info("STRATOS STRING IS:"+brokerInfo); + } + Pattern p = Pattern.compile(NGSIConstants.NGSI_LD_ENDPOINT_REGEX); + Matcher m; + Set> callablesCollection = new HashSet>(); + for (String brokerInfo : brokerList) { + m = p.matcher(brokerInfo); + m.find(); + String uri = m.group(1); + logger.debug("url " + uri.toString() + "/ngsi-ld/v1/temporal/entities/?" + params); + logger.info("STRATOS url " + uri.toString() + "/ngsi-ld/v1/temporal/entities/?" + params); + Callable callable = () -> { + HttpHeaders headers = new HttpHeaders(); + for (Object link : linkHeaders) { + headers.add("Link", "<" + link.toString() + + ">; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\""); + } + + HttpEntity entity = new HttpEntity<>(headers); + + String result = restTemplate.exchange(uri + "/ngsi-ld/v1/temporal/entities/?" 
+ params, + HttpMethod.GET, entity, String.class).getBody(); + + logger.debug("http call result :: ::" + result); + return result; + }; + callablesCollection.add(callable); + + } + fromCsources = getDataFromCsources(callablesCollection); + logger.debug("csource call response :: "); + // fromCsources.forEach(e -> logger.debug(e)); + + return fromCsources; + } catch (Exception e) { + e.printStackTrace(); + logger.error( + "No reply from registry. Looks like you are running without a context source registry."); + logger.error(e.getMessage()); + return null; + } + } + }); + +// // Csources response + + executorService.shutdown(); + + +// +// // storage response + logger.trace("storage task status completed :: " + futureStorageManager.isDone()); + List fromStorage = (List) futureStorageManager.get(); + List fromCsources = (List) futureContextRegistry.get(); + aggregatedResult.addAll(fromStorage); + if (fromCsources != null) { + aggregatedResult.addAll(fromCsources); + } + realResult = aggregatedResult; + result.setDataString(realResult); +// result.setqToken(qToken); +// result.setLimit(limit); +// result.setOffset(offset); +// result.setResultsLeftAfter(dataLeft); +// result.setResultsLeftBefore(offset); + logger.trace("retrieveTemporalEntity :: completed"); + QueryResult qResult = result; + return generateReply(request, qResult, true); + //return httpUtils.generateReply(request, historyDAO.getListAsJsonArray(historyDAO.query(qp))); + //return httpUtils.generateReply(request, result); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + + @GetMapping("/{entityId}") + public ResponseEntity retrieveTemporalEntityById(HttpServletRequest request, + @PathVariable("entityId") String entityId) { + String params = request.getQueryString(); + try { + logger.info(" Stratos:: retrieving temporal entity with id " + entityId); + logger.trace("retrieveTemporalEntityById :: started " + entityId); + logger.debug("entityId : " + entityId); + if (params != null && !Validator.validate(params)) + throw new ResponseException(ErrorType.BadRequestData); + + QueryParams qp = paramsResolver.getQueryParamsFromUriQuery(request.getParameterMap(), + HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT), true); + qp.setId(entityId); + logger.trace("retrieveTemporalEntityById :: completed"); + return httpUtils.generateReply(request, historyDAO.getListAsJsonArray(historyDAO.query(qp))); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + + @DeleteMapping("/{entityId}") + public ResponseEntity deleteTemporalEntityById(HttpServletRequest request, + @PathVariable("entityId") String entityId) { + try { + logger.trace("deleteTemporalEntityById :: started"); + logger.debug("entityId : " + entityId); + historyService.delete(entityId, null, null, + HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT)); + 
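+ // delete(entityId, attrId, instanceId, linkHeaders) is called here with attrId and instanceId set to null,
+ // presumably removing the whole temporal entity; the /attrs and /attrs/{attrId}/{instanceId} endpoints
+ // below reuse the same method with those arguments filled in to narrow the deletion.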
logger.trace("deleteTemporalEntityById :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + + @PostMapping("/{entityId}/attrs") + public ResponseEntity addAttrib2TemopralEntity(HttpServletRequest request, + @PathVariable("entityId") String entityId, @RequestBody(required = false) String payload) { + try { + logger.trace("addAttrib2TemopralEntity :: started"); + logger.debug("entityId : " + entityId); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.HISTORY_URL_ID); + + historyService.addAttrib2TemporalEntity(entityId, resolved); + logger.trace("addAttrib2TemopralEntity :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + + @DeleteMapping("/{entityId}/attrs/{attrId}") + public ResponseEntity deleteAttrib2TemporalEntity(HttpServletRequest request, + @PathVariable("entityId") String entityId, @PathVariable("attrId") String attrId) { + try { + logger.trace("deleteAttrib2TemporalEntity :: started"); + logger.debug("entityId : " + entityId + " attrId : " + attrId); + historyService.delete(entityId, attrId, null, + HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT)); + logger.trace("deleteAttrib2TemporalEntity :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + + @PatchMapping("/{entityId}/attrs/{attrId}/{instanceId}") + public ResponseEntity modifyAttribInstanceTemporalEntity(HttpServletRequest request, + @PathVariable("entityId") String entityId, @PathVariable("attrId") String attrId, + @PathVariable("instanceId") String instanceId, @RequestBody(required = false) String payload) { + try { + logger.trace("modifyAttribInstanceTemporalEntity :: started"); + logger.debug("entityId : " + entityId + " attrId : " + attrId + " instanceId : " + instanceId); + + String resolved = httpUtils.expandPayload(request, payload, AppConstants.HISTORY_URL_ID); + + // TODO : TBD- conflict between specs and implementation + historyService.modifyAttribInstanceTemporalEntity(entityId, resolved, attrId, instanceId, + HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT)); + logger.trace("modifyAttribInstanceTemporalEntity :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + 
logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + + @DeleteMapping("/{entityId}/attrs/{attrId}/{instanceId}") + public ResponseEntity deleteAtrribInstanceTemporalEntity(HttpServletRequest request, + @PathVariable("entityId") String entityId, @PathVariable("attrId") String attrId, + @PathVariable("instanceId") String instanceId) { + try { + logger.trace("deleteAtrribInstanceTemporalEntity :: started"); + logger.debug("entityId : " + entityId + " attrId : " + attrId + " instanceId : " + instanceId); + historyService.delete(entityId, attrId, instanceId, + HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT)); + logger.trace("deleteAtrribInstanceTemporalEntity :: completed"); + return ResponseEntity.noContent().build(); + } catch (ResponseException ex) { + logger.error("Exception", ex); + return ResponseEntity.status(ex.getHttpStatus()).body(new RestResponse(ex).toJsonBytes()); + } catch (Exception ex) { + logger.error("Exception", ex); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, ex.getLocalizedMessage()).toJsonBytes()); + } + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/repository/HistoryDAO.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/repository/HistoryDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..3a06bb3b075e95949409b98a84203fc2a03a8001 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/repository/HistoryDAO.java @@ -0,0 +1,219 @@ +package eu.neclab.ngsildbroker.historymanager.repository; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Repository; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository +public class HistoryDAO extends StorageReaderDAO { + + protected final static Logger logger = LoggerFactory.getLogger(HistoryDAO.class); + + protected final static String DBCOLUMN_HISTORY_ENTITY_ID = "id"; + protected final static String DBCOLUMN_HISTORY_ENTITY_TYPE = "type"; + protected final static String DBCOLUMN_HISTORY_ATTRIBUTE_ID = "attributeid"; + protected final static String DBCOLUMN_HISTORY_INSTANCE_ID = "instanceid"; + + protected final static Map NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_TIME = initNgsildToSqlReservedPropertiesMappingTime(); + + protected static Map initNgsildToSqlReservedPropertiesMappingTime() { + Map map = new HashMap<>(); + map.put(NGSIConstants.NGSI_LD_CREATED_AT, DBConstants.DBCOLUMN_CREATED_AT); + map.put(NGSIConstants.NGSI_LD_MODIFIED_AT, DBConstants.DBCOLUMN_MODIFIED_AT); + map.put(NGSIConstants.NGSI_LD_OBSERVED_AT, DBConstants.DBCOLUMN_OBSERVED_AT); + return Collections.unmodifiableMap(map); + } + + @Override + protected String translateNgsildQueryToSql(QueryParams 
qp) throws ResponseException { + StringBuilder fullSqlWhere = new StringBuilder(70); + String sqlWhereGeoquery = ""; + String sqlWhere = ""; + + if (qp.getType() != null) { + sqlWhere = getSqlWhereForField("te." + DBCOLUMN_HISTORY_ENTITY_TYPE, qp.getType()); + fullSqlWhere.append(sqlWhere + " AND "); + } + if (qp.getAttrs() != null) { + sqlWhere = getSqlWhereForField("teai." + DBCOLUMN_HISTORY_ATTRIBUTE_ID, qp.getAttrs()); + fullSqlWhere.append(sqlWhere + " AND "); + } + if (qp.getInstanceId() != null) { + sqlWhere = getSqlWhereForField("teai." + DBCOLUMN_HISTORY_INSTANCE_ID, qp.getInstanceId()); + fullSqlWhere.append(sqlWhere + " AND "); + } + if (qp.getId() != null) { + sqlWhere = getSqlWhereForField("te." + DBCOLUMN_HISTORY_ENTITY_ID, qp.getId()); + fullSqlWhere.append(sqlWhere + " AND "); + } + if (qp.getIdPattern() != null) { + sqlWhere = "te." + DBCOLUMN_HISTORY_ENTITY_ID + " ~ '" + qp.getIdPattern() + "'"; + fullSqlWhere.append(sqlWhere + " AND "); + } + + // temporal query + if (qp.getTimerel() != null) { + sqlWhere = translateNgsildTimequeryToSql(qp.getTimerel(), qp.getTime(), qp.getTimeproperty(), + qp.getEndTime(), "teai."); + fullSqlWhere.append(sqlWhere + " AND "); + } + + // geoquery + if (qp.getGeorel() != null) { + GeoqueryRel gqr = qp.getGeorel(); + logger.debug("Georel value " + gqr.getGeorelOp()); + sqlWhere = translateNgsildGeoqueryToPostgisQuery(gqr, qp.getGeometry(), qp.getCoordinates(), + qp.getGeoproperty(), "geovalue"); + if (!sqlWhere.isEmpty()) { + String sqlWhereTemporal = translateNgsildTimequeryToSql(qp.getTimerel(), qp.getTime(), + qp.getTimeproperty(), qp.getEndTime(), ""); + sqlWhereGeoquery = "where exists (" + " select 1 " + " from temporalentityattrinstance " + + " where temporalentity_id = r.id and " + " attributeid = '" + qp.getGeoproperty() + + "' and " + " attributetype = '" + NGSIConstants.NGSI_LD_GEOPROPERTY + "' and " + + sqlWhereTemporal + " and " + sqlWhere + ") "; + } + } + + + String sqlQuery = "with r as (" + + " select te.id, te.type, te.createdat, te.modifiedat, coalesce(teai.attributeid, '') as attributeid, jsonb_agg(teai.data"; + + if (!qp.getIncludeSysAttrs()) { + sqlQuery += " - '" + NGSIConstants.NGSI_LD_CREATED_AT + "' - '" + NGSIConstants.NGSI_LD_MODIFIED_AT + "'"; + } + sqlQuery += " order by teai.modifiedat desc) as attributedata" + " from " + DBConstants.DBTABLE_TEMPORALENTITY + + " te" + " left join " + DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE + + " teai on (teai.temporalentity_id = te.id)" + " where "; + sqlQuery += fullSqlWhere.toString() + " 1=1 "; + sqlQuery += " group by te.id, te.type, te.createdat, te.modifiedat, teai.attributeid " + + " order by te.id, teai.attributeid " + ") " + + "select tedata || case when attrdata <> '{\"\": [null]}'::jsonb then attrdata else tedata end as data from ( " + + " select id, ('{\"" + NGSIConstants.JSON_LD_ID + "\":\"' || id || '\"}')::jsonb || " + + " ('{\"" + NGSIConstants.JSON_LD_TYPE + "\":[\"' || type || '\"]}')::jsonb "; + if (qp.getIncludeSysAttrs()) { + sqlQuery += " || ('{\"" + NGSIConstants.NGSI_LD_CREATED_AT + "\":"; + //if (!qp.getTemporalValues()) { + sqlQuery += "[ { \"" + NGSIConstants.JSON_LD_TYPE + "\": \"" + NGSIConstants.NGSI_LD_DATE_TIME + + "\", \"" + NGSIConstants.JSON_LD_VALUE + "\": \"' "; + //} else { + // sqlQuery += "\"'"; + //} + sqlQuery += "|| to_char(createdat, 'YYYY-MM-DD\"T\"HH24:MI:SS.ssssss\"Z\"') || '\""; + //if (!qp.getTemporalValues()) { + sqlQuery += "}]"; + //} + sqlQuery += "}')::jsonb || "; + sqlQuery += " ('{\"" + 
NGSIConstants.NGSI_LD_MODIFIED_AT + "\":"; + //if (!qp.getTemporalValues()) { + sqlQuery += "[ { \"" + NGSIConstants.JSON_LD_TYPE + "\": \"" + NGSIConstants.NGSI_LD_DATE_TIME + + "\", \"" + NGSIConstants.JSON_LD_VALUE + "\": \"' "; + //} else { + // sqlQuery += "\"'"; + //} + sqlQuery += "|| to_char(modifiedat, 'YYYY-MM-DD\"T\"HH24:MI:SS.ssssss\"Z\"') || '\""; + //if (!qp.getTemporalValues()) { + sqlQuery += "}]"; + //} + sqlQuery += "}')::jsonb"; + } + sqlQuery += " as tedata, " + "jsonb_object_agg(attributeid,"; + if (qp.getTemporalValues()) { + if (qp.getTimeproperty().equalsIgnoreCase(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + sqlQuery += "(select json_agg(jsonb_build_array(t -> '" + NGSIConstants.NGSI_LD_HAS_VALUE + "'->0->'" + + NGSIConstants.JSON_LD_VALUE + "',t->'" + NGSIConstants.NGSI_LD_MODIFIED_AT + "'->0->'" + + NGSIConstants.JSON_LD_VALUE + "')) from jsonb_array_elements(attributedata) as x(t))"; + } else if (qp.getTimeproperty().equalsIgnoreCase(NGSIConstants.NGSI_LD_CREATED_AT)) { + sqlQuery += "(select json_agg(jsonb_build_array(t -> '" + NGSIConstants.NGSI_LD_HAS_VALUE + "'->0->'" + + NGSIConstants.JSON_LD_VALUE + "',t->'" + NGSIConstants.NGSI_LD_CREATED_AT + "'->0->'" + + NGSIConstants.JSON_LD_VALUE + "')) from jsonb_array_elements(attributedata) as x(t))"; + } else if (qp.getTimeproperty().equalsIgnoreCase(NGSIConstants.NGSI_LD_OBSERVED_AT)) { + sqlQuery += "(select json_agg(jsonb_build_array(t -> '" + NGSIConstants.NGSI_LD_HAS_VALUE + "'->0->'" + + NGSIConstants.JSON_LD_VALUE + "',t->'" + NGSIConstants.NGSI_LD_OBSERVED_AT + "'->0->'" + + NGSIConstants.JSON_LD_VALUE + "')) from jsonb_array_elements(attributedata) as x(t))"; + } + } else { + sqlQuery += "attributedata"; + } + + sqlQuery += ") as attrdata " + " from r "; + sqlQuery += sqlWhereGeoquery; + sqlQuery += " group by id, type, createdat, modifiedat "; + sqlQuery += " order by modifiedat desc "; + sqlQuery += ") as m"; + + // advanced query "q" + //THIS DOESN'T WORK + if (qp.getQ() != null) { + sqlQuery += " where " + qp.getQ(); + } + + return sqlQuery; + } + + private String getSqlWhereForField(String dbColumn, String value) { + String sqlWhere = ""; + if (value.indexOf(",") == -1) { + sqlWhere = dbColumn + "='" + value + "'"; + } else { + sqlWhere = dbColumn + " IN ('" + value.replace(",", "','") + "')"; + } + return sqlWhere; + } + + protected String translateNgsildTimequeryToSql(String timerel, String time, String timeproperty, String endTime, + String dbPrefix) throws ResponseException { + StringBuilder sqlWhere = new StringBuilder(50); + + String sqlTestStatic = dbPrefix + "static = true AND "; + + String dbColumn = NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_TIME.get(timeproperty); + if (dbColumn == null) { + sqlTestStatic += "data?'" + timeproperty + "' = false"; + dbColumn = "(" + dbPrefix + "data#>>'{" + timeproperty + ",0," + NGSIConstants.NGSI_LD_HAS_VALUE + + ",0,@value}')::timestamp "; + } else { + dbColumn = dbPrefix + dbColumn; + sqlTestStatic += dbColumn + " IS NULL"; + } + + sqlWhere.append("( (" + sqlTestStatic + ") OR "); // temporal filters do not apply to static attributes + + switch (timerel) { + case NGSIConstants.TIME_REL_BEFORE: + sqlWhere.append(dbColumn + DBConstants.SQLQUERY_LESSEQ + " '" + time + "'::timestamp"); + break; + case NGSIConstants.TIME_REL_AFTER: + sqlWhere.append(dbColumn + DBConstants.SQLQUERY_GREATEREQ + " '" + time + "'::timestamp"); + break; + case NGSIConstants.TIME_REL_BETWEEN: + sqlWhere.append(dbColumn + " BETWEEN '" + time + "'::timestamp AND '" + endTime + 
"'::timestamp"); + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Invalid georel operator: " + timerel); + } + sqlWhere.append(")"); + return sqlWhere.toString(); + } + + public boolean entityExists(String entityId) { + List list = readerJdbcTemplate.queryForList("Select id from temporalentity where id='" + entityId + "';"); + if (list == null || list.isEmpty()) { + return false; + } + return true; + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/repository/TCSourceDAO.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/repository/TCSourceDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..4ce807a0c1047e65b472f9cbefbdacf7784414ff --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/repository/TCSourceDAO.java @@ -0,0 +1,329 @@ +package eu.neclab.ngsildbroker.historymanager.repository; + +import java.sql.SQLException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.stereotype.Repository; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository("qmtcsourcedao") +public class TCSourceDAO extends StorageReaderDAO { + + private final static Logger logger = LogManager.getLogger(TCSourceDAO.class); + + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_ID = "entity_id"; + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN = "entity_idpattern"; + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE = "entity_type"; + protected final static String DBCOLUMN_CSOURCE_INFO_PROPERTY_ID = "property_id"; + protected final static String DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID = "relationship_id"; + + protected final static Map NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO = initNgsildToSqlReservedPropertiesMappingGeo(); + + protected static Map initNgsildToSqlReservedPropertiesMappingGeo() { + Map map = new HashMap<>(); + map.put(NGSIConstants.NGSI_LD_LOCATION, DBConstants.DBCOLUMN_LOCATION); + return Collections.unmodifiableMap(map); + } + + protected final static Map NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING = initNgsildToPostgisGeoOperatorsMapping(); + + protected static Map initNgsildToPostgisGeoOperatorsMapping() { + Map map = new HashMap<>(); + map.put(NGSIConstants.GEO_REL_NEAR, null); + map.put(NGSIConstants.GEO_REL_WITHIN, DBConstants.POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_CONTAINS, DBConstants.POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_OVERLAPS, null); + map.put(NGSIConstants.GEO_REL_INTERSECTS, DBConstants.POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_EQUALS, DBConstants.POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_DISJOINT, null); + return Collections.unmodifiableMap(map); + } + + private boolean externalCsourcesOnly = false; + + @Override + public List query(QueryParams qp) { + this.externalCsourcesOnly = 
false; + return super.query(qp); + } + + public List queryExternalCsources(QueryParams qp) throws SQLException { + this.externalCsourcesOnly = true; + return super.query(qp); + } + + @Override + protected String translateNgsildQueryToSql(QueryParams qp) throws ResponseException { + StringBuilder fullSqlWhere = new StringBuilder(70); + String sqlWhere = ""; + boolean csourceInformationIsNeeded = false; + boolean sqlOk = false; + + if (externalCsourcesOnly) { + fullSqlWhere.append("(c.internal = false) AND "); + } + + // query by type + (id, idPattern) + if (qp.getType()!=null) { + + String typeValue = qp.getType(); + String idValue = ""; + String idPatternValue = ""; + if (qp.getId()!=null) + idValue = qp.getId(); + if (qp.getIdPattern()!=null) + idPatternValue = qp.getIdPattern(); + // id takes precedence on idPattern. clear idPattern if both are given + if (!idValue.isEmpty() && !idPatternValue.isEmpty()) + idPatternValue = ""; + + // query by type + (id, idPattern) + attrs + if (qp.getAttrs()!=null) { + String attrsValue = qp.getAttrs(); + sqlWhere = getCommonSqlWhereForTypeIdIdPattern(typeValue, idValue, idPatternValue); + sqlWhere += " AND "; + sqlWhere += getSqlWhereByAttrsInTypeFiltering(attrsValue); + + } else { // query by type + (id, idPattern) only (no attrs) + + sqlWhere = "(c.has_registrationinfo_with_attrs_only) OR "; + sqlWhere += getCommonSqlWhereForTypeIdIdPattern(typeValue, idValue, idPatternValue); + + } + fullSqlWhere.append("(" + sqlWhere + ") AND "); + csourceInformationIsNeeded = true; + sqlOk = true; + + // query by attrs only + } else if (qp.getAttrs()!=null) { + String attrsValue = qp.getAttrs(); + if (attrsValue.indexOf(",") == -1) { + sqlWhere = "ci." + DBCOLUMN_CSOURCE_INFO_PROPERTY_ID+" = '"+attrsValue+"' OR " + +"ci." + DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID+" = '"+attrsValue+"'"; + }else { + sqlWhere="ci." + DBCOLUMN_CSOURCE_INFO_PROPERTY_ID+" IN ('"+attrsValue.replace(",", "','")+"') OR " + +"ci." + DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID+" IN ('"+attrsValue.replace(",", "','")+"')"; + } + fullSqlWhere.append("(" + sqlWhere + ") AND "); + csourceInformationIsNeeded = true; + sqlOk = true; + } + + // advanced query "q" + if (qp.getQ()!=null) { + // TODO: it's not clear in spec how this should work + logger.error("'q' filter has not been developed yet in csource discovery!"); + return ""; + } + + // geoquery + if (qp.getGeorel()!=null) { + GeoqueryRel gqr = qp.getGeorel(); + logger.debug("Georel value " + gqr.getGeorelOp()); + try { + sqlWhere = translateNgsildGeoqueryToPostgisQuery(gqr, qp.getGeometry(), qp.getCoordinates(), + qp.getGeoproperty()); + } catch (ResponseException e) { + e.printStackTrace(); + } + fullSqlWhere.append(sqlWhere + " AND "); + sqlOk = true; + } + + if (sqlOk) { + String sqlQuery = "SELECT DISTINCT c.data " + "FROM " + DBConstants.DBTABLE_CSOURCE + " c "; + if (csourceInformationIsNeeded) + sqlQuery += "INNER JOIN " + DBConstants.DBTABLE_CSOURCE_INFO + " ci ON (ci.csource_id = c.id) "; + + if (fullSqlWhere.length() > 0) { + sqlQuery += "WHERE " + fullSqlWhere.toString() + " 1=1 "; + } + // order by ? 
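+ // illustrative shape of the generated discovery query for an external-csources lookup, assuming the table
+ // names behind DBConstants.DBTABLE_CSOURCE / DBTABLE_CSOURCE_INFO are csource / csourceinformation:
+ //   SELECT DISTINCT c.data FROM csource c INNER JOIN csourceinformation ci ON (ci.csource_id = c.id)
+ //   WHERE (c.internal = false) AND (<type/id/idPattern/attrs filter>) AND 1=1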
+ return sqlQuery; + } else { + return ""; + } + } + + private String getCommonSqlWhereForTypeIdIdPattern(String typeValue, String idValue, String idPatternValue) { + String sqlWhere = ""; + if (idValue.isEmpty() && idPatternValue.isEmpty()) { // case 1: type only + sqlWhere += getSqlWhereByType(typeValue, false); + } else if (!idValue.isEmpty() && idPatternValue.isEmpty()) { // case 2: type+id + sqlWhere += "("; + sqlWhere += getSqlWhereByType(typeValue, true); + sqlWhere += " OR "; + sqlWhere += getSqlWhereById(typeValue, idValue); + sqlWhere += ")"; + } else if (idValue.isEmpty() && !idPatternValue.isEmpty()) { // case 3: type+idPattern + sqlWhere += "("; + sqlWhere += getSqlWhereByType(typeValue, true); + sqlWhere += " OR "; + sqlWhere += getSqlWhereByIdPattern(typeValue, idPatternValue); + sqlWhere += ")"; + } + return sqlWhere; + } + + private String getSqlWhereByType(String typeValue, boolean includeIdAndIdPatternNullTest) { + String sqlWhere = "("; + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') "; + } + if (includeIdAndIdPatternNullTest) + sqlWhere += "AND ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " IS NULL AND " + + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " IS NULL"; + sqlWhere += ")"; + return sqlWhere; + } + + private String getSqlWhereById(String typeValue, String idValue) { + String sqlWhere = "( "; + + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' AND "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') AND "; + } + + if (idValue.indexOf(",") == -1) { + sqlWhere += "(" + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " = '" + idValue + "' OR " + "'" + + idValue + "' ~ " + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + ")"; + } else { + String[] ids = idValue.split(","); + String whereId = "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " IN ( "; + String whereIdPattern = "("; + for (String id : ids) { + whereId += "'" + id + "',"; + whereIdPattern += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " ~ '" + id + + "' OR "; + } + whereId = StringUtils.chomp(whereId, ","); + whereIdPattern = StringUtils.chomp(whereIdPattern, "OR "); + whereId += ")"; + whereIdPattern += ")"; + + sqlWhere += "(" + whereId + " OR " + whereIdPattern + ")"; + } + + sqlWhere += " )"; + return sqlWhere; + } + + private String getSqlWhereByIdPattern(String typeValue, String idPatternValue) { + String sqlWhere = "( "; + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' AND "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') AND "; + } + sqlWhere += "(" + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " ~ '" + idPatternValue + "' OR " + + "ci." 
+ DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " ~ '" + idPatternValue + "')"; + sqlWhere += " )"; + return sqlWhere; + } + + private String getSqlWhereByAttrsInTypeFiltering(String attrsValue) { + String sqlWhere; + sqlWhere = "( " + + "NOT EXISTS (SELECT 1 FROM csourceinformation ci2 " + + " WHERE ci2.group_id = ci.group_id AND " + + " (ci2.property_id IS NOT NULL OR ci2.relationship_id IS NOT NULL)) " + + "OR " + + "EXISTS (SELECT 1 FROM csourceinformation ci3 " + + " WHERE ci3.group_id = ci.group_id AND " ; + if (attrsValue.indexOf(",") == -1) { + sqlWhere += "(ci3.property_id = '" + attrsValue + "' OR " + + " ci3.relationship_id = '" + attrsValue + "') "; + } else { + sqlWhere += "(ci3.property_id IN ('" + attrsValue.replace(",", "','") + "') OR " + + " ci3.relationship_id IN ('" + attrsValue.replace(",", "','") + "') ) "; + } + sqlWhere += ") )"; + return sqlWhere; + } + + // TODO: SQL input sanitization + // TODO: property of property + // TODO: [SPEC] spec is not clear on how to define a "property of property" in + // the geoproperty field. (probably using dots, but...) + @Override + protected String translateNgsildGeoqueryToPostgisQuery(GeoqueryRel georel, String geometry, String coordinates, + String geoproperty) throws ResponseException { + if (georel.getGeorelOp().isEmpty() || geometry==null || coordinates==null || geometry.isEmpty() || coordinates.isEmpty()) { + logger.error("georel, geometry and coordinates are empty or invalid!"); + throw new ResponseException(ErrorType.BadRequestData, + "georel, geometry and coordinates are empty or invalid!"); + } + + StringBuilder sqlWhere = new StringBuilder(50); + + String georelOp = georel.getGeorelOp(); + logger.debug(" Geoquery term georelOp: " + georelOp); + + String dbColumn = NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO.get(geoproperty); + if (dbColumn == null) { + dbColumn = "ST_SetSRID(ST_GeomFromGeoJSON( c.data#>>'{" + geoproperty + ",0," + + NGSIConstants.JSON_LD_VALUE + "}'), 4326)"; + } else { + dbColumn = "c." 
+ dbColumn; + } + + String referenceValue = "ST_SetSRID(ST_GeomFromGeoJSON('{\"type\": \"" + geometry + "\", \"coordinates\": " + + coordinates + " }'), 4326)"; + + switch (georelOp) { + case NGSIConstants.GEO_REL_WITHIN: + case NGSIConstants.GEO_REL_CONTAINS: + case NGSIConstants.GEO_REL_INTERSECTS: + case NGSIConstants.GEO_REL_EQUALS: + sqlWhere.append(NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING.get(georelOp) + "( " + dbColumn + ", " + + referenceValue + ") "); + break; + case NGSIConstants.GEO_REL_NEAR: + if (georel.getDistanceType()!=null && georel.getDistanceValue()!=null) { + if (georel.getDistanceType().equals(NGSIConstants.GEO_REL_MIN_DISTANCE)) + sqlWhere.append("NOT " + DBConstants.POSTGIS_WITHIN + "( " + dbColumn + ", ST_Buffer(" + referenceValue + + "::geography, " + georel.getDistanceValue() + + ")::geometry ) "); + else + sqlWhere.append(DBConstants.POSTGIS_INTERSECTS + "( " + dbColumn + ", ST_Buffer(" + referenceValue + + "::geography, " + georel.getDistanceValue() + + ")::geometry ) "); + } else { + throw new ResponseException(ErrorType.BadRequestData, + "GeoQuery: Type and distance are required for near relation"); + } + break; + case NGSIConstants.GEO_REL_OVERLAPS: + sqlWhere.append("("); + sqlWhere.append(DBConstants.POSTGIS_OVERLAPS + "( " + dbColumn + ", " + referenceValue + ")"); + sqlWhere.append(" OR "); + sqlWhere.append(DBConstants.POSTGIS_CONTAINS + "( " + dbColumn + ", " + referenceValue + ")"); + sqlWhere.append(")"); + break; + case NGSIConstants.GEO_REL_DISJOINT: + sqlWhere.append("NOT " + DBConstants.POSTGIS_WITHIN + "( " + dbColumn + ", " + referenceValue + ") "); + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Invalid georel operator: " + georelOp); + } + return sqlWhere.toString(); + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/service/HistoryService.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/service/HistoryService.java new file mode 100644 index 0000000000000000000000000000000000000000..e1c6fe07ca69721550b6df5758fcfea285bdde50 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/service/HistoryService.java @@ -0,0 +1,444 @@ +package eu.neclab.ngsildbroker.historymanager.service; + +import java.net.URI; +import java.time.Instant; +import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.UUID; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.messaging.handler.annotation.Header; +import org.springframework.messaging.handler.annotation.Payload; +import org.springframework.stereotype.Service; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.TemporalEntityStorageKey; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; 
+import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.SerializationTools; +import eu.neclab.ngsildbroker.historymanager.config.ProducerChannel; +import eu.neclab.ngsildbroker.historymanager.repository.HistoryDAO; + +@Service +public class HistoryService { + + private final static Logger logger = LoggerFactory.getLogger(HistoryService.class); + + @Autowired + KafkaOps kafkaOperations; + @Autowired + ParamsResolver paramsResolver; + @Autowired + HistoryDAO historyDAO; + +// public static final Gson GSON = DataSerializer.GSON; + + JsonParser parser = new JsonParser(); + + private final ProducerChannel producerChannels; + + public HistoryService(ProducerChannel producerChannels) { + this.producerChannels = producerChannels; + + } + + public URI createTemporalEntityFromEntity(String payload) throws ResponseException, Exception { + return createTemporalEntity(payload, true); + } + + public URI createTemporalEntityFromBinding(String payload) throws ResponseException, Exception { + return createTemporalEntity(payload, false); + } + + private URI createTemporalEntity(String payload, boolean fromEntity) throws ResponseException, Exception { + logger.trace("creating temporal entity"); + final JsonObject jsonObject = parser.parse(payload).getAsJsonObject(); + System.out.println(jsonObject.toString()); + + if (jsonObject.get(NGSIConstants.JSON_LD_ID) == null || jsonObject.get(NGSIConstants.JSON_LD_TYPE) == null) { + throw new ResponseException(ErrorType.InvalidRequest, "id and type are required fields"); + } + String now = SerializationTools.formatter.format(Instant.now()); + + if (jsonObject.get(NGSIConstants.NGSI_LD_CREATED_AT) == null + || jsonObject.get(NGSIConstants.NGSI_LD_CREATED_AT) == null) { + JsonArray temp = new JsonArray(); + JsonObject tempObj = new JsonObject(); + tempObj.addProperty(NGSIConstants.JSON_LD_TYPE, "DateTime"); + tempObj.addProperty(NGSIConstants.JSON_LD_VALUE, now); + temp.add(tempObj); + if (jsonObject.get(NGSIConstants.NGSI_LD_CREATED_AT) == null) { + jsonObject.add(NGSIConstants.NGSI_LD_CREATED_AT, temp); + } + if (jsonObject.get(NGSIConstants.NGSI_LD_MODIFIED_AT) == null) { + jsonObject.add(NGSIConstants.NGSI_LD_MODIFIED_AT, temp); + } + } + + String id = jsonObject.get(NGSIConstants.JSON_LD_ID).getAsString(); + String type = jsonObject.get(NGSIConstants.JSON_LD_TYPE).getAsJsonArray().get(0).getAsString(); + String createdAt = jsonObject.get(NGSIConstants.NGSI_LD_CREATED_AT).getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + String modifiedAt = jsonObject.get(NGSIConstants.NGSI_LD_MODIFIED_AT).getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + + Integer attributeCount = 0; + for (Map.Entry entry : jsonObject.entrySet()) { + logger.debug("Key = " + entry.getKey() + " Value = " + entry.getValue()); + if (entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_CREATED_AT) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_MODIFIED_AT) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_INSTANCE_ID)) { + continue; + } + String attribId = entry.getKey(); + Boolean createTemporalEntityIfNotExists = (attributeCount == 0); // if it's the first attribute, create the + // temporalentity record + + if (entry.getValue().isJsonArray()) 
{ + JsonArray valueArray = entry.getValue().getAsJsonArray(); + for (JsonElement jsonElement : valueArray) { + jsonElement = setCommonTemporalProperties(jsonElement, now, fromEntity); + pushAttributeToKafka(id, type, createdAt, modifiedAt, attribId, jsonElement.toString(), + createTemporalEntityIfNotExists, false); + } + } + attributeCount++; + } + if (attributeCount == 0) { // create empty temporalentity (no attributes) + TemporalEntityStorageKey tesk = new TemporalEntityStorageKey(id); + tesk.setEntityType(type); + tesk.setEntityCreatedAt(createdAt); + tesk.setEntityModifiedAt(modifiedAt); + String messageKey = DataSerializer.toJson(tesk); + logger.debug(" message key " + messageKey + " payload element: empty"); + kafkaOperations.pushToKafka(producerChannels.temporalEntityWriteChannel(), messageKey.getBytes(), + "".getBytes()); + } + logger.trace("temporal entity created " + id); + return new URI(AppConstants.HISTORY_URL + id); + } + + private JsonElement setCommonTemporalProperties(JsonElement jsonElement, String date, boolean fromEntity) { + String valueCreatedAt; + if (fromEntity) { + // reuse modifiedAt field from Attribute in Entity, if exists + if (jsonElement.getAsJsonObject().has(NGSIConstants.NGSI_LD_MODIFIED_AT) + && jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_MODIFIED_AT).isJsonArray() + && jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_MODIFIED_AT).getAsJsonArray() + .get(0) != null + && jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_MODIFIED_AT).getAsJsonArray().get(0) + .isJsonObject() + && jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_MODIFIED_AT).getAsJsonArray().get(0) + .getAsJsonObject().has(NGSIConstants.JSON_LD_VALUE)) { + valueCreatedAt = jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_MODIFIED_AT).getAsJsonArray() + .get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_VALUE).getAsString(); + } else { + valueCreatedAt = date; + } + } else { + valueCreatedAt = date; + } + // append/overwrite temporal fields. 
as we are creating new instances, + // modifiedAt and createdAt are the same + jsonElement = setTemporalProperty(jsonElement, NGSIConstants.NGSI_LD_CREATED_AT, valueCreatedAt); + jsonElement = setTemporalProperty(jsonElement, NGSIConstants.NGSI_LD_MODIFIED_AT, valueCreatedAt); + // system generated instance id + UUID uuid = UUID.randomUUID(); + String instanceid = "urn" + ":" + "ngsi-ld" + ":" + uuid; + jsonElement = setTemporalPropertyinstanceId(jsonElement, NGSIConstants.NGSI_LD_INSTANCE_ID, instanceid); + return jsonElement; + } + + private JsonElement setTemporalProperty(JsonElement jsonElement, String propertyName, String value) { + JsonObject objAttribute = jsonElement.getAsJsonObject(); + objAttribute.remove(propertyName); + JsonObject obj = new JsonObject(); + obj.addProperty(NGSIConstants.JSON_LD_TYPE, NGSIConstants.NGSI_LD_DATE_TIME); + obj.addProperty(NGSIConstants.JSON_LD_VALUE, value); + JsonArray arr = new JsonArray(); + arr.add(obj); + objAttribute.add(propertyName, arr); + return objAttribute; + } + + // system generated instance id + private JsonElement setTemporalPropertyinstanceId(JsonElement jsonElement, String propertyName, String value) { + JsonObject objAttribute = jsonElement.getAsJsonObject(); + objAttribute.remove(propertyName); + JsonObject obj = new JsonObject(); + obj.addProperty(NGSIConstants.JSON_LD_ID, value); + JsonArray arr = new JsonArray(); + arr.add(obj); + objAttribute.add(propertyName, arr); + return objAttribute; + } + + private void pushAttributeToKafka(String entityId, String entityType, String entityCreatedAt, + String entityModifiedAt, String attributeId, String elementValue, Boolean createTemporalEntityIfNotExists, + Boolean overwriteOp) throws ResponseException { + String messageKey; + TemporalEntityStorageKey tesk = new TemporalEntityStorageKey(entityId); + if (createTemporalEntityIfNotExists != null && createTemporalEntityIfNotExists) { + tesk.setEntityType(entityType); + tesk.setEntityCreatedAt(entityCreatedAt); + tesk.setEntityModifiedAt(entityModifiedAt); + tesk.setAttributeId(attributeId); + messageKey = DataSerializer.toJson(tesk); + } else { + tesk.setEntityModifiedAt(entityModifiedAt); + tesk.setAttributeId(attributeId); + tesk.setOverwriteOp(overwriteOp); + messageKey = DataSerializer.toJson(tesk); + } + logger.debug(" message key " + messageKey + " payload element " + elementValue); + kafkaOperations.pushToKafka(producerChannels.temporalEntityWriteChannel(), messageKey.getBytes(), + elementValue.getBytes()); + } + + private void pushAttributeToKafka(String id, String entityModifiedAt, String attributeId, String elementValue) + throws ResponseException { + pushAttributeToKafka(id, null, null, entityModifiedAt, attributeId, elementValue, null, null); + } + + public void delete(String entityId, String attributeId, String instanceId, List linkHeaders) + throws ResponseException, Exception { + logger.debug("deleting temporal entity with id : " + entityId + "and attributeId : " + attributeId); + + String resolvedAttrId = null; + if (attributeId != null) { + resolvedAttrId = paramsResolver.expandAttribute(attributeId, linkHeaders); + } + TemporalEntityStorageKey tesk = new TemporalEntityStorageKey(entityId); + tesk.setAttributeId(resolvedAttrId); + tesk.setInstanceId(instanceId); + String messageKey = DataSerializer.toJson(tesk); + logger.trace("message key created : " + messageKey); + kafkaOperations.pushToKafka(producerChannels.temporalEntityWriteChannel(), messageKey.getBytes(), + "null".getBytes()); + logger.trace("temporal entity (" + 
entityId + ") deleted"); + } + + // endpoint "/entities/{entityId}/attrs" + public void addAttrib2TemporalEntity(String entityId, String payload) throws ResponseException, Exception { + logger.trace("replace attribute in temporal entity"); + final JsonObject jsonObject = parser.parse(payload).getAsJsonObject(); + String now = SerializationTools.formatter.format(Instant.now()); + if (!historyDAO.entityExists(entityId)) { + throw new ResponseException(ErrorType.NotFound, "You cannot create an attribute on a none existing entity"); + } + for (Map.Entry entry : jsonObject.entrySet()) { + logger.debug("Key = " + entry.getKey() + " Value = " + entry.getValue()); + if (entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_CREATED_AT) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + continue; + } + + String attribId = entry.getKey(); + if (entry.getValue().isJsonArray()) { + JsonArray valueArray = entry.getValue().getAsJsonArray(); + Integer instanceCount = 0; + for (JsonElement jsonElement : valueArray) { + jsonElement = setCommonTemporalProperties(jsonElement, now, false); + // + Boolean overwriteOp = (instanceCount == 0); // if it's the first one, send the overwrite op to + // delete current values + pushAttributeToKafka(entityId, null, null, now, attribId, jsonElement.toString(), false, + overwriteOp); + instanceCount++; + } + } + } + logger.trace("attribute replaced in temporalentity " + entityId); + } + + // for endpoint "entities/{entityId}/attrs/{attrId}/{instanceId}") + public void modifyAttribInstanceTemporalEntity(String entityId, String payload, String attribId, String instanceId, + List linkHeaders) throws ResponseException, Exception { + + String now = SerializationTools.formatter.format(Instant.now()); + + String resolvedAttrId = null; + if (attribId != null) { + resolvedAttrId = paramsResolver.expandAttribute(attribId, linkHeaders); + } + + // check if entityId + attribId + instanceid exists. 
if not, throw exception + // ResourceNotFound + QueryParams qp = new QueryParams(); + qp.setId(entityId); + qp.setAttrs(resolvedAttrId); + qp.setInstanceId(instanceId); + List entityList = historyDAO.query(qp); + if (entityList.size() == 0) { + throw new ResponseException(ErrorType.NotFound); + } + + // get original createdAt + String createdAt = now; + String instanceIdAdd = null; + JsonArray jsonArray = null; + try { + jsonArray = parser.parse(historyDAO.getListAsJsonArray(entityList)).getAsJsonArray(); + createdAt = jsonArray.get(0).getAsJsonObject().get(resolvedAttrId).getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.NGSI_LD_CREATED_AT).getAsJsonArray().get(0).getAsJsonObject() + .get(NGSIConstants.JSON_LD_VALUE).getAsString(); + } catch (Exception e) { + e.printStackTrace(); + logger.warn("original createdAt element not found, using current timestamp"); + } + + logger.debug("modify attribute instance in temporal entity " + entityId + " - " + resolvedAttrId + " - " + + createdAt); + + final JsonObject jsonObject = parser.parse(payload).getAsJsonObject(); + + for (Map.Entry entry : jsonObject.entrySet()) { + logger.debug("Key = " + entry.getKey() + " Value = " + entry.getValue()); + if (entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_CREATED_AT) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + continue; + } + String attribIdPayload = entry.getKey(); + if (!attribIdPayload.equals(resolvedAttrId)) { + throw new ResponseException(ErrorType.InvalidRequest, + "attribute id in payload and in URL must be the same: " + attribIdPayload + " (payload) / " + + resolvedAttrId + " (URL)"); + } + + if (entry.getValue().isJsonArray()) { + JsonArray valueArray = entry.getValue().getAsJsonArray(); + for (JsonElement jsonElement : valueArray) { + if (jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_INSTANCE_ID) != null) { + if (!jsonElement.getAsJsonObject().get(NGSIConstants.NGSI_LD_INSTANCE_ID).getAsJsonArray() + .get(0).getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString() + .equals(instanceId)) { + throw new ResponseException(ErrorType.InvalidRequest, + "instanceId in payload and in URL must be the same"); + } + } else { + instanceIdAdd = jsonArray.get(0).getAsJsonObject().get(resolvedAttrId).getAsJsonArray().get(0) + .getAsJsonObject().get(NGSIConstants.NGSI_LD_INSTANCE_ID).getAsJsonArray().get(0) + .getAsJsonObject().get(NGSIConstants.JSON_LD_ID).getAsString(); + jsonElement = setTemporalPropertyinstanceId(jsonElement, NGSIConstants.NGSI_LD_INSTANCE_ID, + instanceIdAdd); + } + jsonElement = setTemporalProperty(jsonElement, NGSIConstants.NGSI_LD_CREATED_AT, createdAt); + jsonElement = setTemporalProperty(jsonElement, NGSIConstants.NGSI_LD_MODIFIED_AT, now); + pushAttributeToKafka(entityId, now, attribIdPayload, jsonElement.toString()); + } + } + } + logger.trace("instance modified in temporalentity " + entityId); + } + + /* + + */ + @KafkaListener(topics = "${entity.create.topic}", groupId = "historyManagerCreate") + public void handleEntityCreate(@Payload byte[] message, @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) + throws Exception { + logger.trace("Listener handleEntityCreate..."); + String payload = new String(message); + logger.debug("Received message: " + payload); + createTemporalEntityFromEntity(payload); + } + + @KafkaListener(topics = "${entity.append.topic}", groupId = 
"historyManagerAppend") + public void handleEntityAppend(@Payload byte[] message, @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) + throws Exception { + logger.trace("Listener handleEntityAppend..."); + + logger.debug("Received key: " + key); + String payload = new String(message); + logger.debug("Received message: " + payload); + + String now = SerializationTools.formatter.format(Instant.now()); + + final JsonObject jsonObject = parser.parse(payload).getAsJsonObject(); + for (Map.Entry entry : jsonObject.entrySet()) { + logger.debug("Key = " + entry.getKey() + " Value = " + entry.getValue()); + if (entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_CREATED_AT) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + continue; + } + String attribIdPayload = entry.getKey(); + + if (entry.getValue().isJsonArray()) { + JsonArray valueArray = entry.getValue().getAsJsonArray(); + for (JsonElement jsonElement : valueArray) { + jsonElement = setCommonTemporalProperties(jsonElement, now, true); + pushAttributeToKafka(key, now, attribIdPayload, jsonElement.toString()); + } + } + } + + } + + @KafkaListener(topics = "${entity.update.topic}", groupId = "historyManagerUpdate") + public void handleEntityUpdate(@Payload byte[] message, @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) + throws Exception { + logger.trace("Listener handleEntityUpdate..."); + + logger.debug("Received key: " + key); + String payload = new String(message); + logger.debug("Received message: " + payload); + + String now = SerializationTools.formatter.format(Instant.now()); + + final JsonObject jsonObject = parser.parse(payload).getAsJsonObject(); + for (Map.Entry entry : jsonObject.entrySet()) { + logger.debug("Key = " + entry.getKey() + " Value = " + entry.getValue()); + if (entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_ID) + || entry.getKey().equalsIgnoreCase(NGSIConstants.JSON_LD_TYPE) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_CREATED_AT) + || entry.getKey().equalsIgnoreCase(NGSIConstants.NGSI_LD_MODIFIED_AT)) { + continue; + } + String attribIdPayload = entry.getKey(); + + if (entry.getValue().isJsonArray()) { + JsonArray valueArray = entry.getValue().getAsJsonArray(); + for (JsonElement jsonElement : valueArray) { + jsonElement = setCommonTemporalProperties(jsonElement, now, true); + pushAttributeToKafka(key, now, attribIdPayload, jsonElement.toString()); + } + } + } + + } + + @KafkaListener(topics = "${entity.delete.topic}", groupId = "historyManagerDelete") + public void handleEntityDelete(@Payload byte[] message, @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key) + throws Exception { + logger.trace("Listener handleEntityDelete..."); + + logger.debug("Received key: " + key); + String payload = new String(message); + logger.debug("Received message: " + payload); + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/utils/Validator.java b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/utils/Validator.java new file mode 100644 index 0000000000000000000000000000000000000000..66513e369956f8fa8a5efef8d8688e7d04995fdc --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/java/eu/neclab/ngsildbroker/historymanager/utils/Validator.java @@ -0,0 +1,107 @@ +package eu.neclab.ngsildbroker.historymanager.utils; + +import 
java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; + +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; + +public class Validator { + /* + * @Autowired static ObjectMapper objectMapper; + */ + + private final static Logger logger = LoggerFactory.getLogger(Validator.class); + private static Map getParameterMap() { + Map paramMap = new HashMap(); + paramMap.put(NGSIConstants.QUERY_PARAMETER_TYPE, NGSIConstants.QUERY_PARAMETER_TYPE); + paramMap.put(NGSIConstants.QUERY_PARAMETER_ID, NGSIConstants.QUERY_PARAMETER_ID); + paramMap.put(NGSIConstants.QUERY_PARAMETER_IDPATTERN, NGSIConstants.QUERY_PARAMETER_IDPATTERN); + paramMap.put(NGSIConstants.QUERY_PARAMETER_ATTRS, NGSIConstants.QUERY_PARAMETER_ATTRS); + paramMap.put(NGSIConstants.QUERY_PARAMETER_QUERY, NGSIConstants.QUERY_PARAMETER_QUERY); + paramMap.put(NGSIConstants.QUERY_PARAMETER_GEOREL, NGSIConstants.QUERY_PARAMETER_GEOREL); + paramMap.put(NGSIConstants.QUERY_PARAMETER_GEOMETRY, NGSIConstants.QUERY_PARAMETER_GEOMETRY); + paramMap.put(NGSIConstants.QUERY_PARAMETER_COORDINATES, NGSIConstants.QUERY_PARAMETER_COORDINATES); + paramMap.put(NGSIConstants.QUERY_PARAMETER_GEOPROPERTY, NGSIConstants.QUERY_PARAMETER_GEOPROPERTY); + paramMap.put(NGSIConstants.QUERY_PARAMETER_TIMEREL, NGSIConstants.QUERY_PARAMETER_TIMEREL); + paramMap.put(NGSIConstants.QUERY_PARAMETER_TIME, NGSIConstants.QUERY_PARAMETER_TIME); + paramMap.put(NGSIConstants.QUERY_PARAMETER_TIMEPROPERTY, NGSIConstants.QUERY_PARAMETER_TIMEPROPERTY); + paramMap.put(NGSIConstants.QUERY_PARAMETER_ENDTIME, NGSIConstants.QUERY_PARAMETER_ENDTIME); + paramMap.put(NGSIConstants.QUERY_PARAMETER_OPTIONS, NGSIConstants.QUERY_PARAMETER_OPTIONS); + return paramMap; + } + + private static List splitQueryParameter(String url) { + List queryPairs = null; + if (url != null && url.trim().length() > 0) { + queryPairs = new ArrayList(); + String[] pairs = url.split("&"); + for (String pair : pairs) { + int idx = pair.indexOf("="); + queryPairs.add(pair.substring(0, idx)); + } + } + return queryPairs; + } + + public static boolean validate(String queryString) { + boolean result = true; + + try { + String decodedQuery = URLDecoder.decode(queryString, StandardCharsets.UTF_8.name()); + Map paramMap = getParameterMap(); + List queriesKey = splitQueryParameter(decodedQuery); + logger.info("Query String:" + decodedQuery); + logger.info("Queries Key:" + queriesKey); + for (String key : queriesKey) { + String value = paramMap.get(key); + if (value == null) { + logger.info("this key is null:"+key); + result = false; + break; + } + } + return result; + } catch (Exception e) { + // Handle exceptions gracefully (e.g., logging) + e.printStackTrace(); + } + return false; + } + + public static void validateTemporalEntity(String payload) throws ResponseException, Exception { + JsonParser parser = new JsonParser(); + if (payload == null) { + throw new ResponseException(ErrorType.UnprocessableEntity); + } + try { + JsonObject jsonObject = parser.parse(payload).getAsJsonObject(); + if (jsonObject.isJsonNull()) { + throw new ResponseException(ErrorType.OperationNotSupported); + } + if (!jsonObject.has(NGSIConstants.QUERY_PARAMETER_ID) + || 
!jsonObject.has(NGSIConstants.QUERY_PARAMETER_TYPE)) { + throw new ResponseException(ErrorType.BadRequestData); + } + //for {"id":""} case + if (jsonObject.get(NGSIConstants.QUERY_PARAMETER_ID).getAsString().trim().length() == 0 + || jsonObject.get(NGSIConstants.QUERY_PARAMETER_TYPE).getAsString().trim().length() == 0) { + throw new ResponseException(ErrorType.BadRequestData); + } + } catch (Exception e) { + throw new ResponseException(ErrorType.BadRequestData); + } + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/main/resources/application-aaio.yml b/scorpio-broker/History/HistoryManager/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..ecb3ce4797c9fc0a101c9c53a347394e2f6b424c --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/resources/application-aaio.yml @@ -0,0 +1,34 @@ +server: + port: 1040 + +bootstrap: + servers: kafka:9092 + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +spring: + kafka: + consumer: + bootstrap-servers: kafka:9092 + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_querymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/History/HistoryManager/src/main/resources/application-aio.yml b/scorpio-broker/History/HistoryManager/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..0a5a5da137f401237c6c8c9d5d45bdd1598787d8 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/resources/application-aio.yml @@ -0,0 +1,34 @@ +server: + port: 1040 + +bootstrap: + servers: localhost:9092 + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +spring: + kafka: + consumer: + bootstrap-servers: localhost:9092 + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + datasource: + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_querymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/History/HistoryManager/src/main/resources/application-dist.yml b/scorpio-broker/History/HistoryManager/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..0353ad7c2865c3d149ad7fdeb961b00ffabc1e8e --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/resources/application-dist.yml @@ -0,0 +1,34 @@ +server: + port: 1040 + +bootstrap: + servers: kafka:9092 + +atcontext: + url: http://gateway:9090/ngsi-ld/contextes/ + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ + +spring: + kafka: + consumer: + bootstrap-servers: kafka:9092 + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_querymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/History/HistoryManager/src/main/resources/application.yml 
b/scorpio-broker/History/HistoryManager/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..9e4db443a3d81fcd12ea8dcdbfa705db0b6f0a2b --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/resources/application.yml @@ -0,0 +1,64 @@ +spring: + application: + name: history-manager + main: + lazy-initialization: true + datasource: + url: "jdbc:postgresql://127.0.0.1:5432/ngb?ApplicationName=ngb_historymanager" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + TEMPORAL_ENTITY_WRITE_CHANNEL: + destination: TEMPORALENTITY + contentType: application/json + +server: + port: 1040 + + + +entity: + create: + topic: ENTITY_CREATE + append: + topic: ENTITY_APPEND + update: + topic: ENTITY_UPDATE + delete: + topic: ENTITY_DELETE + temporal: + topic: TEMPORALENTITY + +#kafka broker path +bootstrap: + servers: localhost:9092 + + +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ + + + + \ No newline at end of file diff --git a/scorpio-broker/History/HistoryManager/src/main/resources/log4j2-spring.xml b/scorpio-broker/History/HistoryManager/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..33bf54e5fd85746a380ed01b089506cf870454be --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/HistoryHandlerTest.java b/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/HistoryHandlerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..3e97685bd0a05543e2f86e8d639072e0e1c98f4b --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/HistoryHandlerTest.java @@ -0,0 +1,15 @@ +package eu.neclab.ngsildbroker.historymanager; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) + +public class HistoryHandlerTest { + @Test + public void contextLoads() { + } + +} diff --git a/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.java b/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..18e02a9212373903c61c9943e27d5341896641f6 --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.java @@ -0,0 +1,434 @@ +package eu.neclab.ngsildbroker.historymanager.controller; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static 
org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; +import java.net.URI; +import java.util.ArrayList; +import java.util.List; +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.client.MockRestServiceServer; +import org.springframework.test.web.servlet.MockMvc; +import org.springframework.test.web.servlet.request.MockMvcRequestBuilders; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.historymanager.repository.HistoryDAO; +import eu.neclab.ngsildbroker.historymanager.service.HistoryService; +import eu.neclab.ngsildbroker.historymanager.utils.Validator; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties = { "spring.main.allow-bean-definition-overriding=true" }) +@AutoConfigureMockMvc +public class HistoryControllerTest { + @Autowired + private MockMvc mockMvc; + + @Mock + private ParamsResolver paramResolver; + + @MockBean + private HistoryService historyService; + + @MockBean + private Validator validate; + + @Mock + HistoryDAO historyDAO; + + @Autowired + ContextResolverBasic contextResolver; + + @Value("${atcontext.url}") + String atContextServerUrl; + + private String temporalPayload; + private URI uri; + + @Before + public void setup() throws Exception { + + uri = new URI(AppConstants.HISTORY_URL + "urn:ngsi-ld:testunit:151"); + + MockitoAnnotations.initMocks(this); + + // @formatter:on + + temporalPayload = "{\r\n " + "\"id\": \"urn:ngsi-ld:testunit:151\"," + + "\r\n \"type\": \"AirQualityObserved\"," + "\r\n \"airQualityLevel\": " + "[\r\n {" + + "\r\n " + "\r\n " + + "\"type\": \"Property\",\r\n \"value\": \"good\"," + + "\r\n \"observedAt\": \"2018-08-07T12:00:00Z\"" + "\r\n }," + "\r\n {" + + "\r\n " + "\r\n \"type\": \"Property\"," + + "\r\n \"value\": \"moderate\"," + + "\r\n \"observedAt\": \"2018-08-14T12:00:00Z\"" + "\r\n }," + "\r\n " + + "{\r\n " + "\r\n \"type\": \"Property\"," + + "\r\n \"value\": \"unhealthy\"," + + "\r\n \"observedAt\": \"2018-09-14T12:00:00Z\"" + "\r\n }\r\n ]," + + "\r\n \"@context\": [" + "\r\n \"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\"" + + "\r\n ]\r\n}\r\n\r\n"; + + } + + @After + public void tearDown() { + temporalPayload = ""; + } + + /** + * this method is 
use for create the temporalEntity + */ + + @Test + public void createTemporalEntityTest() { + try { + when(historyService.createTemporalEntityFromBinding(any())).thenReturn(uri); + mockMvc.perform(post("/ngsi-ld/v1/temporal/entities/").contentType(AppConstants.NGB_APPLICATION_JSONLD) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(temporalPayload)) + .andExpect(status().isCreated()); + verify(historyService, times(1)).createTemporalEntityFromBinding(any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to create the temporalEntity having "BAD REQUEST" + */ + + @Test + public void createTemporalEntityBadRequestTest() { + try { + when(historyService.createTemporalEntityFromBinding(any())) + .thenThrow(new ResponseException(ErrorType.BadRequestData)); + mockMvc.perform(post("/ngsi-ld/v1/temporal/entities/").contentType(AppConstants.NGB_APPLICATION_JSONLD) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(temporalPayload)) + .andExpect(status().isBadRequest()).andExpect(jsonPath("$.title").value("Bad Request Data.")); + + verify(historyService, times(1)).createTemporalEntityFromBinding(any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to create the temporalEntity having "INTERNAL SERVER + * ERROR" + */ + + @Test + public void createTemporalEntityInternalServerErrorTest() { + try { + when(historyService.createTemporalEntityFromBinding(any())).thenThrow(new Exception()); + mockMvc.perform(post("/ngsi-ld/v1/temporal/entities/").contentType(AppConstants.NGB_APPLICATION_JSONLD) + .accept(AppConstants.NGB_APPLICATION_JSONLD).content(temporalPayload)) + .andExpect(status().isInternalServerError()); + + verify(historyService, times(1)).createTemporalEntityFromBinding(any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is use for update the temporalEntity + */ + + @Test + public void updateAttrById() { + try { + mockMvc.perform(post("/ngsi-ld/v1/temporal/entities/urn:ngsi-ld:testunit:151/attrs") + .contentType(AppConstants.NGB_APPLICATION_JSONLD).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(temporalPayload)).andExpect(status().isNoContent()); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to update the temporalEntity with "INTERNAL SERVER + * ERROR" + */ + + @Test + public void updateAttrByIdInternalServerError() { + try { + Mockito.doThrow(new Exception()).when(historyService).addAttrib2TemporalEntity(any(), any()); + mockMvc.perform(post("/ngsi-ld/v1/temporal/entities/urn:ngsi-ld:testunit:151/attrs") + .contentType(AppConstants.NGB_APPLICATION_JSONLD).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(temporalPayload)).andExpect(status().isInternalServerError()); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to update the temporalEntity having "BAD REQUEST" + */ + + @Test + public void updateAttrByIdBadRequest() { + try { + Mockito.doThrow(new ResponseException(ErrorType.BadRequestData)).when(historyService) + .addAttrib2TemporalEntity(any(), any()); + mockMvc.perform(post("/ngsi-ld/v1/temporal/entities/urn:ngsi-ld:testunit:151/attrs") + .contentType(AppConstants.NGB_APPLICATION_JSONLD).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(temporalPayload)).andExpect(status().isBadRequest()); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is use for modify the attribute of 
temporalEntity + */ + + @Test + public void modifyAttribInstanceTemporalEntityTest() { + try { + mockMvc.perform(patch("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}/{instanceId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel", "urn:ngsi-ld:d43aa0fe-a986-4479-9fac-35b7eba232041") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(temporalPayload)) + .andExpect(status().isNoContent()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to modify the attribute of temporalEntity having + * "INTERNAL SERVER ERROR" + */ + + @Test + public void modifyAttribInstanceTemporalEntityInternalServerError() { + + try { + Mockito.doThrow(new Exception()).when(historyService).modifyAttribInstanceTemporalEntity(any(), any(), + any(), any(), any()); + mockMvc.perform(patch("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}/{instanceId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel", "urn:ngsi-ld:d43aa0fe-a986-4479-9fac-35b7eba232041") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(temporalPayload)) + .andExpect(status().isInternalServerError()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to modify the attribute of temporalEntity having "BAD + * REQUEST" + */ + + @Test + public void modifyAttribInstanceTemporalEntityBadRequest() { + + try { + Mockito.doThrow(new ResponseException(ErrorType.BadRequestData)).when(historyService) + .modifyAttribInstanceTemporalEntity(any(), any(), any(), any(), any()); + mockMvc.perform(patch("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}/{instanceId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel", "urn:ngsi-ld:d43aa0fe-a986-4479-9fac-35b7eba232041") + .contentType(AppConstants.NGB_APPLICATION_JSON).accept(AppConstants.NGB_APPLICATION_JSONLD) + .content(temporalPayload)) + .andExpect(status().isBadRequest()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is use for delete the temporalEntity by attribute + */ + + @Test + public void deleteTemporalEntityByAttr() { + List linkHeaders = new ArrayList<>(); + try { + mockMvc.perform( + MockMvcRequestBuilders + .delete("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNoContent()); + verify(historyService, times(1)).delete("urn:ngsi-ld:testunit:151", "airQualityLevel", null, linkHeaders); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is use to delete the temporalEntity + */ + + @Test + public void deleteTemporalEntity() { + List linkHeaders = new ArrayList<>(); + try { + mockMvc.perform(MockMvcRequestBuilders + .delete("/ngsi-ld/v1/temporal/entities/{entityId}", "urn:ngsi-ld:testunit:151") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isNoContent()); + verify(historyService, times(1)).delete("urn:ngsi-ld:testunit:151", null, null, linkHeaders); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to delete the temporalEntity having "INTERNAL SERVER + * ERROR" + */ + + @Test + public void deleteTemporalEntityInternalServerError() { + List linkHeaders = null; + try { + Mockito.doThrow(new Exception()).when(historyService).delete(any(), any(), any(), any()); + mockMvc.perform(MockMvcRequestBuilders + 
.delete("/ngsi-ld/v1/temporal/entities/{entities}", "urn:ngsi-ld:testunit:151") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isInternalServerError()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to delete the temporalEntity having "BAD REQUEST" + */ + + @Test + public void deleteTemporalEntityBadRequest() { + List linkHeaders = null; + try { + Mockito.doThrow(new ResponseException(ErrorType.BadRequestData)).when(historyService).delete(any(), any(), + any(), any()); + mockMvc.perform(MockMvcRequestBuilders + .delete("/ngsi-ld/v1/temporal/entities/{entities}", "urn:ngsi-ld:testunit:151") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isBadRequest()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to delete the attribute of temporalEntity having + * "INTERNAL SERVER ERROR" + */ + + @Test + public void deleteTemporalEntityByAttrInternalServerError() { + List linkHeaders = null; + try { + Mockito.doThrow(new Exception()).when(historyService).delete(any(), any(), any(), any()); + mockMvc.perform( + MockMvcRequestBuilders + .delete("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isInternalServerError()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is try to delete the attribute of temporalEntity having "BAD + * REQUEST" + */ + + @Test + public void deleteTemporalEntityByAttrBadRequest() { + List linkHeaders = null; + try { + Mockito.doThrow(new ResponseException(ErrorType.BadRequestData)).when(historyService).delete(any(), any(), + any(), any()); + mockMvc.perform( + MockMvcRequestBuilders + .delete("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isBadRequest()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is used to delete the temporal entity having "Resource not + * found". + */ + + @Test + public void deleteTemporalEntityByAttrResourceNotFound() { + List linkHeaders = null; + try { + Mockito.doThrow(new ResponseException(ErrorType.NotFound)).when(historyService).delete(any(), any(), any(), + any()); + mockMvc.perform( + MockMvcRequestBuilders + .delete("/ngsi-ld/v1/temporal/entities/{entityId}/attrs/{attrId}", + "urn:ngsi-ld:testunit:151", "airQualityLevel") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)) + .andExpect(status().isNotFound()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is used get the temporalEntity by ID. + */ + + @Test + public void getTemporalEntityById() { + try { + + mockMvc.perform(get("/ngsi-ld/v1/temporal/entities/{entityId}", "urn:ngsi-ld:testunit:151") + .accept(AppConstants.NGB_APPLICATION_JSON)).andExpect(status().isOk()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } + + /** + * this method is used get the temporalEntity by time filter. 
+ */ + + @Test + public void getTemporalEntityByTimefilter() { + try { + mockMvc.perform( + get("/ngsi-ld/v1/temporal/entities/2018-08-07T12:00:00Z").accept(AppConstants.NGB_APPLICATION_JSON)) + .andExpect(status().isOk()).andDo(print()); + + } catch (Exception e) { + Assert.fail(e.getMessage()); + } + } +} diff --git a/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/service/HistoryServiceTest.java b/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/service/HistoryServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..73f067b179e33df3e39a34173f7451c36623913d --- /dev/null +++ b/scorpio-broker/History/HistoryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/service/HistoryServiceTest.java @@ -0,0 +1,154 @@ +package eu.neclab.ngsildbroker.historymanager.service; + +import java.net.URI; +import java.util.List; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.mockito.Spy; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.historymanager.config.ProducerChannel; +import eu.neclab.ngsildbroker.historymanager.repository.HistoryDAO; + +@RunWith(SpringRunner.class) +@SpringBootTest +@AutoConfigureMockMvc +public class HistoryServiceTest { + + @Autowired + private MockMvc mockMvc; + + @Mock + ProducerChannel producerChannels; + + @Mock + KafkaOps kafkaOperations; + + @Mock + HistoryDAO historyDAO; + + @Mock + ParamsResolver paramsResolver; + + @InjectMocks + @Spy + private HistoryService historyService; + + URI uri; + + private String temporalPayload; + + @Before + public void setUp() throws Exception { + MockitoAnnotations.initMocks(this); + uri=new URI(AppConstants.HISTORY_URL + "urn:ngsi-ld:testunit:151"); + + temporalPayload="{\r\n \"https://uri.etsi.org/ngsi-ld/default-context/airQualityLevel\" : [ " + + "{\r\n \"https://uri.etsi.org/ngsi-ld/observedAt\" : [ " + + "{\r\n \"@value\" : \"2018-08-07T12:00:00Z\"," + + "\r\n \"@type\" : \"https://uri.etsi.org/ngsi-ld/DateTime\"" + + "\r\n } ]," + + "\r\n \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/Property\" ]," + + "\r\n \"https://uri.etsi.org/ngsi-ld/hasValue\" : [ {" + + "\r\n \"@value\" : \"good\"" + + "\r\n } ]" + + "\r\n }, {" + + "\r\n \"https://uri.etsi.org/ngsi-ld/observedAt\" : [ {" + + "\r\n \"@value\" : \"2018-08-14T12:00:00Z\"," + + "\r\n \"@type\" : \"https://uri.etsi.org/ngsi-ld/DateTime\"" + + "\r\n } ],\r\n \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/Property\" ]," + + "\r\n \"https://uri.etsi.org/ngsi-ld/hasValue\" : [ {" + + "\r\n \"@value\" : \"moderate\"\r\n } ]" + + "\r\n }, {" + + "\r\n \"https://uri.etsi.org/ngsi-ld/observedAt\" : [ {" + + "\r\n \"@value\" : 
\"2018-09-14T12:00:00Z\",\r\n \"@type\" : \"https://uri.etsi.org/ngsi-ld/DateTime\"\r\n } ]," + + "\r\n \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/Property\" ]," + + "\r\n \"https://uri.etsi.org/ngsi-ld/hasValue\" : [ {" + + "\r\n \"@value\" : \"unhealthy\"\r\n } ]" + + "\r\n } ]," + + "\r\n \"@id\" : \"urn:ngsi-ld:testunit:159\"," + + "\r\n \"@type\" : [ \"https://uri.etsi.org/ngsi-ld/default-context/AirQualityObserved\" ]" + + "\r\n}"; + } + + /** + * this method is use test "createTemporalEntityFromBinding" method of HistoryService + */ + + @Test + public void createTemporalEntityFromBindingTest() { + try { + URI uri1=historyService.createTemporalEntityFromBinding(temporalPayload); + verify(kafkaOperations, times(3)).pushToKafka(any(),any(),any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use test "createTemporalEntityFromEntity" method of HistoryService + */ + + @Test + public void createTemporalEntityFromEntityTest() { + try { + URI uri1=historyService.createTemporalEntityFromEntity(temporalPayload); + verify(kafkaOperations, times(3)).pushToKafka(any(),any(),any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use test "delete" method of HistoryService + */ + + @Test + public void deleteTemporalByIdTest() { + List linkHeaders = null; + try { + Mockito.doReturn("https://uri.etsi.org/ngsi-ld/default-context/airQualityLevel").when(paramsResolver).expandAttribute(any(),any()); + historyService.delete("urn:ngsi-ld:testunit:151", "airQualityLevel", null, linkHeaders); + verify(kafkaOperations, times(1)).pushToKafka(any(),any(),any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + + /** + * this method is use test "addAttrib2TemporalEntity" method of HistoryService + */ + + @Test + public void addAttrib2TemporalEntityTest() { + try { + Mockito.doReturn(true).when(historyDAO).entityExists(any()); + historyService.addAttrib2TemporalEntity("urn:ngsi-ld:testunit:151", temporalPayload); + verify(kafkaOperations, times(3)).pushToKafka(any(),any(),any()); + } catch (Exception e) { + Assert.fail(); + e.printStackTrace(); + } + } + +} + \ No newline at end of file diff --git a/scorpio-broker/History/pom.xml b/scorpio-broker/History/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..abbdc6c5087d757644fff6ac0f5edfe07ecdc1bb --- /dev/null +++ b/scorpio-broker/History/pom.xml @@ -0,0 +1,16 @@ + + 4.0.0 + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../BrokerParent + + History + pom + + + HistoryManager + + \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/META-INF/jandex.idx b/scorpio-broker/HistoryEntityManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..cfc36cc06856d2a4402f4d32a6f963c418806466 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/application-activemq.properties b/scorpio-broker/HistoryEntityManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..6880f1c94e37f49783085700c11a6dbbfd190a56 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/application-activemq.properties @@ -0,0 +1,25 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} 
+bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.histsync.connector=smallrye-camel +mp.messaging.outgoing.histsync.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.histsync}${mysettings.messageconnection.options} +mp.messaging.incoming.histsyncretrieve.connector=smallrye-camel +mp.messaging.incoming.histsyncretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.histsync}${mysettings.messageconnection.options} +mp.messaging.incoming.histsyncretrieve.auto.offset.reset=latest \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/application-kafka.properties b/scorpio-broker/HistoryEntityManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..4126167e580cd8a871f7c40b9e5c3bf9c4f80887 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/application-kafka.properties @@ -0,0 +1,21 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +scorpio.messaging.maxSize=1048576 +#Kafka settings +kafka.bootstrap.servers=${bootstrap.servers} +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-kafka +mp.messaging.incoming.entityretrieve.topic=${scorpio.topics.entity} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.registryretrieve.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.topic=${scorpio.topics.registry} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +mp.messaging.outgoing.histsync.connector=smallrye-kafka +mp.messaging.outgoing.histsync.topic=${scorpio.topics.histsync} +mp.messaging.incoming.histsyncretrieve.connector=smallrye-kafka +mp.messaging.incoming.histsyncretrieve.topic=${scorpio.topics.histsync} +mp.messaging.incoming.histsyncretrieve.auto.offset.reset=latest \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/application-mqtt.properties b/scorpio-broker/HistoryEntityManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..2d824276b21c0f7c176db5516b022fdf34a32d02 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/application-mqtt.properties @@ -0,0 +1,24 
@@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.histsync.connector=smallrye-camel +mp.messaging.outgoing.histsync.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.histsync}${mysettings.messageconnection.options} +mp.messaging.incoming.histsyncretrieve.connector=smallrye-camel +mp.messaging.incoming.histsyncretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.histsync}${mysettings.messageconnection.options} +mp.messaging.incoming.histsyncretrieve.auto.offset.reset=latest \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/application-rabbitmq.properties b/scorpio-broker/HistoryEntityManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..f3fbb29fb1587bd9b407efc4498bfc9ea7d20750 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,28 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true + +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + +#readability block########### +mp.messaging.outgoing.histsync.connector=smallrye-camel +mp.messaging.outgoing.histsync.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.histsync}${mysettings.messageconnection.options} +mp.messaging.incoming.histsyncretrieve.connector=smallrye-camel 
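# Illustrative note (editorial, not part of the checked-in file): each endpoint-uri in this
# profile is assembled as <protocol>:<topic><options>. With the rabbitmq protocol configured
# above, empty options, and the topic names defined in application.properties, the
# registryretrieve channel would resolve to a Camel endpoint of the form rabbitmq:REGISTRY,
# and the histsync/histsyncretrieve channels below to rabbitmq:HIST_SUB_SYNC.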
+mp.messaging.incoming.histsyncretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.histsync}${mysettings.messageconnection.options} +mp.messaging.incoming.histsyncretrieve.auto.offset.reset=latest \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/application-sqs.properties b/scorpio-broker/HistoryEntityManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..e067f6b2fc5cf4ffb54c6718af8ae40ef8a511e9 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/application-sqs.properties @@ -0,0 +1,22 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +#camel.component.aws2-sqs.maximum-message-size=10485760 +mysettings.messageconnection.protocol=sns-fanout +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### + + diff --git a/scorpio-broker/HistoryEntityManager/target/classes/application.properties b/scorpio-broker/HistoryEntityManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..708fb0a04a996e7304e9341d8f22102bb4183e17 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/application.properties @@ -0,0 +1,49 @@ +quarkus.application.name=history-entity-manager +quarkus.http.port=1040 +quarkus.log.level=INFO +quarkus.ssl.native=true +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} + +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.history.autorecording=true +scorpio.history.max-limit=1000 +scorpio.history.default-limit=50 +scorpio.history.batch-operations.create.max=1000 +scorpio.history.batch-operations.upsert.max=1000 +scorpio.history.batch-operations.update.max=1000 
+scorpio.history.batch-operations.delete.max=1000 +scorpio.history.batch-operations.query.max=1000 +scorpio.topics.entity=ENTITY +scorpio.topics.temporal=TEMPORAL +scorpio.topics.histsync=HIST_SUB_SYNC +scorpio.topics.registry=REGISTRY +scorpio.sync.announcement-time=30s +scorpio.sync.check-time=90s +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +#Kafka settings +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
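-- Illustrative note (editorial, not part of the original migration; the entity below is
-- hypothetical): for an expanded NGSI-LD payload such as
--   {"@id": "urn:ngsi-ld:Building:b1",
--    "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Building"],
--    "https://uri.etsi.org/ngsi-ld/createdAt": [{"@type": "https://uri.etsi.org/ngsi-ld/DateTime",
--                                                "@value": "2018-08-07T12:00:00Z"}]}
-- the assignments below set type to the expanded Building IRI and createdat to that timestamp,
-- while the GeoProperty checks further down leave location, observationSpace and operationSpace
-- NULL because no geo members are present.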
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX 
i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + 
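-- Illustrative sketch (editorial, not part of the original migration; the id, endpoint and
-- payload are hypothetical): once the CREATE TRIGGER statement below attaches this function
-- to csource, an insert like
--   INSERT INTO csource (id, data) VALUES (
--     'urn:ngsi-ld:ContextSourceRegistration:csr1',
--     '{"@id": "urn:ngsi-ld:ContextSourceRegistration:csr1",
--       "@type": ["https://uri.etsi.org/ngsi-ld/ContextSourceRegistration"],
--       "https://uri.etsi.org/ngsi-ld/endpoint": [{"@value": "http://localhost:1026"}]}'
--   );
-- would be stored with type and endpoint already extracted into their dedicated columns, and
-- with both has_registrationinfo_* flags left false because no information member is present.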
+CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data 
THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff 
--git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ 
b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
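-- Illustrative note (editorial, not part of the original migration; the identifiers are
-- hypothetical): for an entities entry that carries both "@id": "urn:ngsi-ld:Vehicle:A4567"
-- and an idPattern of "urn:ngsi-ld:Vehicle:.*", the CASE expression below stores only
-- entity_id and leaves entity_idpattern NULL; the pattern is recorded only when the entry
-- has no @id at all.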
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF 
(NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20211217.1__subscription_table.sql 
b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
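-- Illustrative note (editorial, not part of the original migration): the getGeoJson helper
-- defined earlier in this migration rebuilds plain GeoJSON from the expanded JSON-LD form
-- used below, e.g. a value whose @type is https://purl.org/geojson/vocab#Point with an @list
-- of coordinates becomes {"type": "Point", "coordinates": [...]}; substring(... from 32)
-- strips the 31-character https://purl.org/geojson/vocab# prefix, and getCoordinates recurses
-- into nested @list entries so LineString and Polygon coordinate arrays keep their nesting.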
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + 
l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + 
SELECT array_append(scopes, (i#>>'{@value}')) into scopes;
+    END LOOP;
+    RETURN scopes;
+END;
+$scopes$ LANGUAGE plpgsql;
+CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$
+    BEGIN
+        -- do not reprocess if it is just an update on another column
+        IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR
+           (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR
+           (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR
+           (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN
+            -- is any validation needed?
+            NEW.type = NEW.data#>>'{@type,0}';
+            NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP;
+            NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP;
+
+            IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN
+                NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326);
+            ELSE
+                NEW.location = NULL;
+            END IF;
+            IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN
+                NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326);
+            ELSE
+                NEW.observationSpace = NULL;
+            END IF;
+            IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN
+                NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326);
+            ELSE
+                NEW.operationSpace = NULL;
+            END IF;
+            IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION 
csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
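+           -- illustrative example, not in the original file: for an entities entry such as
+           --   {"@id": "urn:ngsi-ld:Vehicle:A", "@type": ["..."], "https://uri.etsi.org/ngsi-ld/idPattern": [{"@value": "urn:ngsi-ld:Vehicle:.*"}]}
+           -- the resulting row gets entity_id = 'urn:ngsi-ld:Vehicle:A' and entity_idpattern = NULL, because the
+           -- CASE expression below only keeps idPattern when @id is absent.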
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 
0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + 
e_type text, + e_prop text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = 
(NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
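-- Each WHEN branch below flips one slot of the 38-element boolean operations array; the
-- slots map positionally onto the createEntity..canCompress columns that the trigger above
-- writes into csourceinformation, so 'updateBatch' is recorded at index 9.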
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
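-- NGSILD_DELETEATTRIB rebuilds the attribute's instance array, keeping only instances whose
-- datasetId differs from deleteDatasetId; when no instance is left, the attribute key is
-- removed from the entity altogether. Note: the path literal '{attribName}' in the jsonb_set
-- call above resolves to a key literally named "attribName", not to the variable's value;
-- ARRAY[attribName], the form used in NGSILD_PARTIALUPDATE above, would address the
-- attribute actually being deleted.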
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ 
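-- Illustrative calls with made-up identifiers (not part of this migration), assuming an
-- ENTITY(id, e_types, entity) table as referenced by the function bodies:
--   SELECT NGSILD_CREATEBATCH('[{"@id": "urn:ngsi-ld:Vehicle:A1", "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"]}]'::jsonb);
--   SELECT NGSILD_DELETEBATCH('["urn:ngsi-ld:Vehicle:A1"]'::jsonb);
-- Both functions return a jsonb document of the form {"success": [...], "failure": [...]}.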
LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN 
DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + 
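-- previous_entity (declared below) keeps the stored entity as it was before the patch:
-- MERGE_JSON walks the keys of b, drops attributes whose value or relationship object is
-- urn:ngsi-ld:null, overwrites or adds every other key, persists the merged document back
-- to the entity table and returns the pre-merge state to the caller.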
previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
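-- The jsonb '?' operator tests for a top-level '@type' key: complete entities are inserted
-- as new rows, attribute-only fragments fall through to the UPDATE branch. The notable
-- change from the V20230220 version is that the unique_violation handler now scopes its
-- UPDATE with WHERE ID = newentity->>'@id'.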
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + 
"@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + 
"scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + 
operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance 
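-- V20230810: adds a geometry 'location' column (with a GiST index) to
-- temporalentityattrinstance, includes a backfill UPDATE that derives it from each
-- instance's most recent earlier geovalue, and replaces the extract trigger function so new
-- rows inherit the latest known location when none is supplied.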
+ ADD COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? 
key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + 
"Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + 
"@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ 
b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20230904.1__fixsubs.sql @@ -0,0 +1 @@ +UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || 
jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', entityId, 'old', prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": 
"geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + 
}, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + 
"typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": 
"ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM 
ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE 
'%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": 
"ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", + + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + 
"@id": "ngsi-ld:hasLanguageMaps", + + "@container": "@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": 
"ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + + "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? 
'@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Invalid attribute structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/controller/HistoryController.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/controller/HistoryController.class new file mode 100644 index 0000000000000000000000000000000000000000..0fab4a2cdf4293b71ca3da59c5e101f17056f5fe Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/controller/HistoryController.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingBase.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingBase.class new file mode 100644 index 0000000000000000000000000000000000000000..65144e739d3582ef4813c02f2a6ac443e56b558e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingBase.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingByteArray.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..c4a912b1b2c64e674926d909ca34513ebf7ca7df Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingByteArray.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingInMemory.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingInMemory.class new file mode 100644 index 0000000000000000000000000000000000000000..a2487b68f5daaeb36bdad7461610109db91d5349 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingInMemory.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingString.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingString.class new file mode 100644 index 0000000000000000000000000000000000000000..61c7f82fe101a9b94e4b84de70570c167612de57 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingString.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySync.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySync.class new file mode 100644 index 0000000000000000000000000000000000000000..f90791ee7b57bf10cbfcd988e081e2199e985ccc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySync.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncByteArray.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..75bf0010c759d444a47ae802d62db9c653e9baaa Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncByteArray.class differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncString.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncString.class new file mode 100644 index 0000000000000000000000000000000000000000..c5714acbf8e4f77e342b3453087ee07ecc63c953 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncString.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/repository/HistoryDAO.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/repository/HistoryDAO.class new file mode 100644 index 0000000000000000000000000000000000000000..d61013ee51fa5609281a963e71a9c56ac6b28819 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/repository/HistoryDAO.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/service/HistoryEntityService.class b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/service/HistoryEntityService.class new file mode 100644 index 0000000000000000000000000000000000000000..18c957c659125a7f9bd1eeccd81eea5e8d5213cd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/classes/eu/neclab/ngsildbroker/historyentitymanager/service/HistoryEntityService.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/history-entity-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/HistoryEntityManager/target/history-entity-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..008488687e389e0fc498705e9a9d2b4db3ef160d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/history-entity-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/maven-archiver/pom.properties b/scorpio-broker/HistoryEntityManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..652a237cbafe656d541fe45df5914ca40f67604b --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:28:09 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=history-entity-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..7b4463fbd49136f55a4434ef96f8aa7063c0fca0 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,10 @@ +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySync.class +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingString.class +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingInMemory.class +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncString.class +eu/neclab/ngsildbroker/historyentitymanager/repository/HistoryDAO.class +eu/neclab/ngsildbroker/historyentitymanager/controller/HistoryController.class 
+eu/neclab/ngsildbroker/historyentitymanager/service/HistoryEntityService.class +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingBase.class +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncByteArray.class +eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingByteArray.class diff --git a/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..810fbfaca235b1ff165a6ddc585eeec1dfe68915 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,10 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/controller/HistoryController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingInMemory.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistoryMessagingString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySync.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/messaging/HistorySyncString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/repository/HistoryDAO.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/main/java/eu/neclab/ngsildbroker/historyentitymanager/service/HistoryEntityService.java diff --git a/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..bd80dded1a9baeda2cfb9d4b4d2ad735dfb7b11e --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,3 @@ +eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class +eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class +eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class diff --git a/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 
0000000000000000000000000000000000000000..256a99b419cd845e0e25b9fc0f4dabc07bb57e4b --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,4 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryEntityManager/src/test/java/eu/neclab/ngsildbroker/historymanager/service/HistoryServiceTest.java diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/app/history-entity-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/app/history-entity-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..66a8454fc0074dff44f414f6cbb2f2ed3f6b7917 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/app/history-entity-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new 
file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar 
differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 
100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 
0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 
index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 
index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar 
differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new 
file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar 
b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git 
a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 
0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final 
+io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 
+io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 
+jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 
+org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..287a4b10cc706dc8015788986a8631539d20830e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee40fec8c83e8d1779661cf8f0b70a3d99ce95a5 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..9a0d21205d896bc4e269dda3d91dec2737a0eb61 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..10f41534226482ca0ebf1e0a34dea24447e9f9e9 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/HistoryEntityManager/target/quarkus-artifact.properties b/scorpio-broker/HistoryEntityManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..e983df782491e32ca3af79a3d16e46c4f4af6a17 --- /dev/null +++ b/scorpio-broker/HistoryEntityManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/history-entity-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class b/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..dc405581d0c7f147ac1ec4aa3b0a5766fa53b846 Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class b/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..130a8dc321f3358765729c66a8c5d19fcae15ed1 Binary files /dev/null and 
b/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class differ diff --git a/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class b/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..dbae88065552e1f5cd48b836be07ac4025337b3e Binary files /dev/null and b/scorpio-broker/HistoryEntityManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/META-INF/jandex.idx b/scorpio-broker/HistoryQueryManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..ec1757a8cbdd904d8f32771bd3242aa42f12b7eb Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/application-activemq.properties b/scorpio-broker/HistoryQueryManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..909c3d6a3decea53385a0f85ccc5f533ab348a2a --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/application-activemq.properties @@ -0,0 +1,16 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + + diff --git a/scorpio-broker/HistoryQueryManager/target/classes/application-kafka.properties b/scorpio-broker/HistoryQueryManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..3518ce96be461f3737f3a0854230c6dd657d86fa --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/application-kafka.properties @@ -0,0 +1,11 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +scorpio.messaging.maxSize=1048576 +#Kafka settings +kafka.bootstrap.servers=${bootstrap.servers} +#readability block########### +mp.messaging.incoming.registryretrieve.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.topic=${scorpio.topics.registry} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/application-mqtt.properties b/scorpio-broker/HistoryQueryManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..dc3b077c5526fac55287a03092c17bc8ab9a8e62 --- /dev/null +++ 
b/scorpio-broker/HistoryQueryManager/target/classes/application-mqtt.properties @@ -0,0 +1,15 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + + diff --git a/scorpio-broker/HistoryQueryManager/target/classes/application-rabbitmq.properties b/scorpio-broker/HistoryQueryManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..55045f21d369f5e2c9c308f6e4e0bec3de9852aa --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,16 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + diff --git a/scorpio-broker/HistoryQueryManager/target/classes/application-sqs.properties b/scorpio-broker/HistoryQueryManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..c883f20d88c35f2b349e02d9bed90e52c4ae7751 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/application-sqs.properties @@ -0,0 +1,15 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#camel.component.aws2-sqs.maximum-message-size=10485760 +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.messageconnection.protocol=sns-fanout + +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + diff --git a/scorpio-broker/HistoryQueryManager/target/classes/application.properties b/scorpio-broker/HistoryQueryManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..c8c53537c95e372ab943a13eacca3cd03ba31088 --- 
/dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/application.properties @@ -0,0 +1,47 @@ +quarkus.application.name=history-query-manager +quarkus.http.port=1050 +quarkus.log.level=INFO +quarkus.ssl.native=true +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +scorpio.at-context-server=http://at-context-server:2023 +atcontext.url=${scorpio.at-context-server}/ngsi-ld/v1/jsonldContexts/ +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.history.autorecording=active +scorpio.history.max-limit=1000 +scorpio.history.default-limit=50 +scorpio.history.batch-operations.create.max=1000 +scorpio.history.batch-operations.upsert.max=1000 +scorpio.history.batch-operations.update.max=1000 +scorpio.history.batch-operations.delete.max=1000 +scorpio.history.batch-operations.query.max=1000 +scorpio.topics.entity=ENTITY +scorpio.topics.registry=REGISTRY +scorpio.topics.temporal=TEMPORAL +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +#Kafka settings +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS 
trigger AS $_$ + BEGIN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON
csource USING GIST (location); +CREATE INDEX i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + 
+ RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF 
TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD 
data_without_sysattrs JSONB; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ 
b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
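-- Illustrative sketch (editorial addition; identifiers are hypothetical): the
-- "id takes precedence over idPattern" rule noted above, applied standalone to
-- a two-element entities array with the same CASE expression:
SELECT value #>> '{@id}'     AS entity_id,
       value #>> '{@type,0}' AS entity_type,
       CASE WHEN value #>> '{@id}' IS NULL
            THEN value #>> '{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'
       END                   AS entity_idpattern
FROM jsonb_array_elements('[
  {"@id": "urn:ngsi-ld:Vehicle:A456",
   "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"],
   "https://uri.etsi.org/ngsi-ld/idPattern": [{"@value": "urn:ngsi-ld:Vehicle:.*"}]},
  {"@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"],
   "https://uri.etsi.org/ngsi-ld/idPattern": [{"@value": "urn:ngsi-ld:Vehicle:.*"}]}
]'::jsonb);
-- row 1: entity_id = urn:ngsi-ld:Vehicle:A456, entity_idpattern = NULL (id wins)
-- row 2: entity_id = NULL,                     entity_idpattern = urn:ngsi-ld:Vehicle:.*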
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF 
(NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20211217.1__subscription_table.sql 
b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
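-- Illustrative sketch (editorial addition): getGeoJson/getCoordinates above
-- rebuild plain GeoJSON from the expanded JSON-LD form of a GeoProperty value.
-- The call below assumes both functions have been created, including the
-- insert-order fix that V20220131.2__bugfix_getCoordinates.sql later applies
-- (insert_after => true), so that coordinates come out in their original order:
SELECT getGeoJson('{
  "@type": ["https://purl.org/geojson/vocab#Point"],
  "https://purl.org/geojson/vocab#coordinates": [
    {"@list": [{"@value": -8.5}, {"@value": 41.2}]}
  ]
}'::jsonb);
-- => {"type": "Point", "coordinates": [-8.5, 41.2]}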
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + 
l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + 
SELECT array_append(scopes,'{-1}', (i#>'{@value}')) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION 
csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
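-- Illustrative sketch (editorial addition; the registrationInfo fragment is
-- hypothetical): the counting loop above flags registrations whose information
-- entries carry only attribute names (has_registrationinfo_with_attrs_only) or
-- only entity descriptions (has_registrationinfo_with_entityinfo_only). With a
-- propertyNames-only entry the counts come out as follows; note that
-- jsonb_array_elements() simply yields no rows when the key is absent:
SELECT (SELECT count(*) FROM jsonb_array_elements(info #> '{https://uri.etsi.org/ngsi-ld/entities}'))      AS entityinfo_count,
       (SELECT count(*) FROM jsonb_array_elements(info #> '{https://uri.etsi.org/ngsi-ld/propertyNames}')) AS attributeinfo_count
FROM (SELECT '{"https://uri.etsi.org/ngsi-ld/propertyNames":
                 [{"@id": "https://uri.etsi.org/ngsi-ld/default-context/speed"}]}'::jsonb AS info) AS sample;
-- => entityinfo_count = 0, attributeinfo_count = 1  (attrs-only registration info)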
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 
0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + e_type 
text, + e_prop text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = 
(NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
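/* regMode > 1 means redirect or exclusive: the registration is rejected when a local entity of this type already carries the attribute */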
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
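-- tmp holds the attribute instances that survive the delete; when it is empty the whole attribute key is dropped from the entity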
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE 
PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT 
TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; 
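-- previous_entity keeps a snapshot of the stored entity before the merge so the pre-merge state can be returned to the caller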
+BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": 
"@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": 
"ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + 
operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD 
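+-- The new 'location' column carries, on each attribute instance row, the entity's most recent
+-- GeoProperty value at that point in time; it is backfilled below and maintained by the trigger,
+-- so temporal geo-queries can filter through the gist index instead of scanning geovalue history.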
COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? 
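+    -- The attribute already exists: overwrite it with the patched instance, but strip the
+    -- patch's createdAt and restore the originally stored createdAt a few lines further down,
+    -- so a merge patch never rewrites creation timestamps.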
key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
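+    -- Patch payloads that carry no modifiedAt are stamped with the current UTC time,
+    -- on both the entity root and the merged attribute instance, to keep sysAttrs consistent.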
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": 
"ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": 
"ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20230904.1__fixsubs.sql 
@@ -0,0 +1 @@ +UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || 
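+      -- With @type present an INSERT is attempted (a unique_violation is handled below as an update).
+      -- Without @type, DO_REPLACE chooses between replacing the stored entity (keeping its @type)
+      -- and merging the patch into it; each success entry records 'old' and 'new' entity states
+      -- so callers can build notifications with previous values.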
jsonb_build_object('id', entityId, 'old', prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
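+    -- Keep the attribute's createdAt from the pre-patch entity rather than the one
+    -- delivered in the patch.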
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
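+      -- Overwrite path: attributes listed in to_update replace the stored ones (re-attaching the
+      -- stored createdAt); since the payload carries @type in this branch, e_types is refreshed from it.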
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
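+      -- Redefinition of NGSILD_UPSERTBATCH from V20231015.1: on a duplicate id without DO_REPLACE,
+      -- the stored entity's @type member is now set to the union of existing and incoming types
+      -- instead of being overwritten by the incoming list alone.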
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": 
"geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + 
}, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + 
"typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": 
"ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY 
WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
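-- Upsert branching: when the incoming payload carries @type the function attempts a plain INSERT;
-- an existing row raises unique_violation, which the EXCEPTION block below turns into an UPDATE
-- (full replacement when do_replace is true, otherwise a merge of types and content). Without
-- @type only the UPDATE paths are taken. Illustrative call only, hypothetical id:
--   SELECT ngsild_upsertbatch('[{"@id": "urn:ngsi-ld:Building:B1",
--     "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Building"]}]'::jsonb, true);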
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
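-- Parameters, annotated for readability: entityMapToken identifies the stored row in
-- public.entitymap, ids optionally restricts the call to specific entity ids, and
-- ilimit/ioffset page through the ordered entity map when no ids are given.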
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
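-- The flags below form a 41-slot boolean array, one slot per NGSI-LD operation in the order used
-- by the csourceinformation columns (createEntity = 1 ... retrieveEntityMap = 41); the CASE that
-- follows switches individual slots or whole groups such as federationOps on.
-- Illustrative call only:
--   SELECT getoperations('[{"@value": "retrieveOps"}]'::jsonb);  -- slots 21 and 22 become true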
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
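-- Registration members are looked up twice throughout this trigger: once under the core
-- https://uri.etsi.org/ngsi-ld/ IRI and once under the default-context expansion
-- (https://uri.etsi.org/ngsi-ld/default-context/...), with a fallback when neither is present;
-- for mode the fallback is 1 (inclusive), matching getmode() defined above.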
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
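-- For registrations in redirect or exclusive mode (regMode > 1) these checks refuse to register a
-- propertyName/relationshipName, id or idPattern that already matches a locally stored entity;
-- the RAISE EXCEPTION statements use SQLSTATE 23514 so the conflict surfaces to the caller as a
-- constraint-style error.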
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE 
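-- The two LIKE patterns below treat the NGSI-LD "null" convention as a deletion marker: an
-- attribute whose hasValue/hasObject equals urn:ngsi-ld:null is removed from the stored entity
-- instead of being merged. Illustrative delete-one-attribute payload (hypothetical id):
--   SELECT merge_json_batch('[{"@id": ["urn:ngsi-ld:Vehicle:A4567"],
--     "https://uri.etsi.org/ngsi-ld/default-context/speed":
--       [{"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]}]}]'::jsonb);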
'%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": 
"ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", + + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + 
"@id": "ngsi-ld:hasLanguageMaps", + + "@container": "@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": 
"ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + + "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? 
'@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, ARRAY[key]::text[], value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 ..
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || to_jsonb(i); + end if; + end loop; + deleted = 0; + if jsonb_array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' =
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/controller/HistoryController.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/controller/HistoryController.class new file mode 100644 index 0000000000000000000000000000000000000000..243d0dddb356135d679d36e613b7071f183976e8 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/controller/HistoryController.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingBase.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingBase.class new file mode 100644 index 0000000000000000000000000000000000000000..194a8a4de4bc9a97842844249404bca1ee8f9f32 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingBase.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingByteArray.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..514cb41c6668bded9d3738eb4fe7de8248aebeb9 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingByteArray.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingInMemory.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingInMemory.class new file mode 100644 index 0000000000000000000000000000000000000000..123a2ce0c998ca7fd3fb6b8e6a6f6502ac85ed50 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingInMemory.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingString.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingString.class new file mode 100644 index 0000000000000000000000000000000000000000..ebdfae580b0de6eaf8bcb2ae3fbeace8438e0481 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingString.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/repository/HistoryDAO.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/repository/HistoryDAO.class new file mode 100644 index 
0000000000000000000000000000000000000000..53b6408727d9dfc71aa106f6e24fbc08a7194d99 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/repository/HistoryDAO.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/service/HistoryQueryService.class b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/service/HistoryQueryService.class new file mode 100644 index 0000000000000000000000000000000000000000..643b9079349cfa05b30b3359cf6345aedb862307 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/classes/eu/neclab/ngsildbroker/historyquerymanager/service/HistoryQueryService.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/history-query-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/HistoryQueryManager/target/history-query-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..8e00730edb2c836ad0fff84e35993a4bd4da041d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/history-query-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/maven-archiver/pom.properties b/scorpio-broker/HistoryQueryManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..7cce3f2b7c35708d9a82efc77cd7c2d56723bf97 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:28:16 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=history-query-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..32bd759dbcf57bc6e102e8c9cbb5d5d280786cc1 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,7 @@ +eu/neclab/ngsildbroker/historyquerymanager/controller/HistoryController.class +eu/neclab/ngsildbroker/historyquerymanager/service/HistoryQueryService.class +eu/neclab/ngsildbroker/historyquerymanager/repository/HistoryDAO.class +eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingString.class +eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingInMemory.class +eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingByteArray.class +eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingBase.class diff --git a/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..90038277e29be524f648d05e2dc5f933122213e8 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,7 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/controller/HistoryController.java 
+/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingInMemory.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/messaging/HistoryMessagingString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/repository/HistoryDAO.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/main/java/eu/neclab/ngsildbroker/historyquerymanager/service/HistoryQueryService.java diff --git a/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..bd80dded1a9baeda2cfb9d4b4d2ad735dfb7b11e --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,3 @@ +eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class +eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class +eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class diff --git a/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..4b5daec81cc152bb0068295a5b549bf07e355e6a --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,3 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/HistoryQueryManager/src/test/java/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.java diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/app/history-query-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/app/history-query-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..e2cea78125387378050115507a91715313fe13e0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/app/history-query-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 
0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git 
a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 
0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf 
Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git 
a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff 
--git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null 
and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git 
a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 
index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git 
a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git 
a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git 
a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar 
b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 
+com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 
+io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 
+io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 
+org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..0c18bb0391947683b728f3ca5c12a049fa0c62cc Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..72bb26ba78c886908421b75d0070633057260b50 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..2646459ca11aef52b1c441f3a3bc5efee69ae072 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..40c10aded41a42eff60c03166205c1fb82fcc4a3 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/HistoryQueryManager/target/quarkus-artifact.properties 
b/scorpio-broker/HistoryQueryManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..3fdc18e728a2ed1f4848b222a8d0ed10c0eb71fa --- /dev/null +++ b/scorpio-broker/HistoryQueryManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/history-query-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class b/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..dc405581d0c7f147ac1ec4aa3b0a5766fa53b846 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/CustomProfile.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class b/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..130a8dc321f3358765729c66a8c5d19fcae15ed1 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/EntityOperationsHistoryControllerTest.class differ diff --git a/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class b/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..def879a4dbdc33fe69e7ff98c32f0190b40d6c07 Binary files /dev/null and b/scorpio-broker/HistoryQueryManager/target/test-classes/eu/neclab/ngsildbroker/historymanager/controller/HistoryControllerTest.class differ diff --git a/scorpio-broker/LICENSE b/scorpio-broker/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f73adf3f347886c1fa77caa08e7a14ffa543bbb9 --- /dev/null +++ b/scorpio-broker/LICENSE @@ -0,0 +1,31 @@ +BSD 4-Clause License + +Copyright (c) 2019, NEC +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * All advertising materials mentioning features or use of this software + must display the following acknowledgement: This product includes + software developed by NEC Laboratories Europe and its contributors. + * Neither the name of NEC Laboratories Europe nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/scorpio-broker/OverallParent/.gitignore b/scorpio-broker/OverallParent/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/OverallParent/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/OverallParent/pom.xml b/scorpio-broker/OverallParent/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..68ec0ed0b654257765acc5cd103643d2bb69bb4d --- /dev/null +++ b/scorpio-broker/OverallParent/pom.xml @@ -0,0 +1,180 @@ + + 4.0.0 + eu.neclab.ngsildbroker + OverallParent + 1.0.0-SNAPSHOT + NGSI-LD Broker Overall Parent + + + pom + + + UTF-8 + UTF-8 + 1.8 + Greenwich.RELEASE + + + + buildForTest + + ${project.artifactId} + + + + java-above-8-support + + [9.0,) + + + + com.sun.xml.bind + jaxb-core + 2.3.0.1 + + + javax.xml.bind + jaxb-api + 2.3.1 + + + com.sun.xml.bind + jaxb-impl + 2.3.1 + + + + + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + org.springframework.boot + spring-boot-dependencies + 2.1.9.RELEASE + pom + import + + + + + + org.springframework.boot + spring-boot-starter-test + + test + + + org.springframework.boot + spring-boot-starter-web + + + + org.springframework.cloud + spring-cloud-stream + + + + org.springframework.cloud + spring-cloud-stream-binder-kafka + + + + org.springframework.kafka + spring-kafka + + + org.springframework.boot + spring-boot-starter-log4j2 + + + org.springframework.boot + spring-boot-starter-actuator + + + io.springfox + springfox-swagger2 + 2.9.2 + + + io.springfox + springfox-swagger-ui + 2.9.2 + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + org.apache.maven.plugins + maven-compiler-plugin + 3.7.0 + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-dependency-plugin + 3.0.2 + + + + + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + diff --git a/scorpio-broker/QueryManager/target/classes/META-INF/jandex.idx b/scorpio-broker/QueryManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..a19c0a08425267fa64a0daf585d695ade6796266 Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/QueryManager/target/classes/application-activemq.properties 
b/scorpio-broker/QueryManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..e2bc37754abee036c0745e3c4723b6a25ecad6be --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/application-activemq.properties @@ -0,0 +1,14 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/application-kafka.properties b/scorpio-broker/QueryManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..3518ce96be461f3737f3a0854230c6dd657d86fa --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/application-kafka.properties @@ -0,0 +1,11 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +scorpio.messaging.maxSize=1048576 +#Kafka settings +kafka.bootstrap.servers=${bootstrap.servers} +#readability block########### +mp.messaging.incoming.registryretrieve.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.topic=${scorpio.topics.registry} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/application-mqtt.properties b/scorpio-broker/QueryManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..dc3b077c5526fac55287a03092c17bc8ab9a8e62 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/application-mqtt.properties @@ -0,0 +1,15 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + + diff --git a/scorpio-broker/QueryManager/target/classes/application-rabbitmq.properties b/scorpio-broker/QueryManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..55045f21d369f5e2c9c308f6e4e0bec3de9852aa --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,16 @@ 
+mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + diff --git a/scorpio-broker/QueryManager/target/classes/application-sqs.properties b/scorpio-broker/QueryManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..0f9327813b0d3b85f0e6d766f70aae82ac444af1 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/application-sqs.properties @@ -0,0 +1,15 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +#camel.component.aws2-sqs.maximum-message-size=10485760 +mysettings.messageconnection.protocol=sns-fanout + +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + diff --git a/scorpio-broker/QueryManager/target/classes/application.properties b/scorpio-broker/QueryManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..47a5e87366b00192b9d46a3309799f968ffe4adc --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/application.properties @@ -0,0 +1,42 @@ +quarkus.application.name=query-manager +quarkus.http.port=1026 +quarkus.ssl.native=true +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +scorpio.at-context-server=http://at-context-server:2023 +atcontext.url=${scorpio.at-context-server}/ngsi-ld/v1/jsonldContexts/ +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.history.autorecording=active +scorpio.history.max-limit=1000 +scorpio.history.default-limit=50 
+scorpio.entity.batch-operations.query.max=1000 +scorpio.startupdelay=5s +scorpio.entitymap.cleanup.ttl=30 sec +scorpio.entitymap.cleanup.schedule=10s +scorpio.topics.registry=REGISTRY +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
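-- Editor's note (illustrative aside, not part of the original migration file): the
-- assignments just below pull the pre-defined NGSI-LD members out of the expanded
-- JSON-LD document with PostgreSQL's jsonb path operators. As a minimal sketch, assume
-- a hypothetical entity document d (URN and timestamp invented for illustration):
--   {"@id": "urn:ngsi-ld:Vehicle:A4567",
--    "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"],
--    "https://uri.etsi.org/ngsi-ld/createdAt": [{"@value": "2019-06-04T12:00:00Z"}]}
-- then the expressions behave like
--   d#>>'{@type,0}'  ->  'https://uri.etsi.org/ngsi-ld/default-context/Vehicle'
--   (d#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP  ->  2019-06-04 12:00:00
-- (#>> returns the element at the given jsonb path as text; the @> containment tests
-- further down only populate the geometry columns when the member is a GeoProperty).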
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX i_csource_expires ON csource 
(expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER 
csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an 
update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git 
a/scorpio-broker/QueryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + 
SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ 
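-- Editor's note (illustrative aside, not part of the original migration file): this
-- revision tightens the guard used in V20190611.2, which read
--   IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN ...
-- Once data may be NULL (the V20190611.1 sysattrs migration drops its NOT NULL
-- constraint), that comparison yields NULL rather than TRUE, so the extraction is
-- silently skipped on updates where either side is NULL. The four cases spelled out
-- below are a long-hand, NULL-safe equivalent of the sketch
--   IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL)
--      OR (TG_OP = 'UPDATE' AND NEW.data IS DISTINCT FROM OLD.data) THEN ...
-- (IS DISTINCT FROM treats NULL as an ordinary, comparable value).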
+ BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' 
); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + 
(TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
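-- Editor's note (illustrative aside, not part of the original migration file): the CASE
-- in the first INSERT below is what enforces "id takes precedence over idPattern".
-- A minimal sketch with a hypothetical entities element (URN and pattern invented for
-- illustration):
--   SELECT v#>>'{@id}' AS entity_id,
--          CASE WHEN v#>>'{@id}' IS NULL
--               THEN v#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END AS entity_idpattern
--   FROM (VALUES ('{"@id": "urn:ngsi-ld:Vehicle:A4567", "https://uri.etsi.org/ngsi-ld/idPattern": [{"@value": "urn:ngsi-ld:Vehicle:.*"}]}'::jsonb)) AS t(v);
--   -- returns ('urn:ngsi-ld:Vehicle:A4567', NULL): idPattern is only stored when @id is absent.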
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20211217.1__subscription_table.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- 
/dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes,'{-1}', (i#>'{@value}')) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
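+ -- editorial notes (not part of the original migration):
+ -- * the #>> paths below assume expanded JSON-LD, e.g. "https://uri.etsi.org/ngsi-ld/createdAt": [{"@value": "..."}],
+ --   so '{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}' reads the first array element's @value as text.
+ -- * the getScopes helper defined earlier in this file calls array_append with three arguments, which is not a
+ --   valid signature; the later migration V20220204.2__bugfix_getScopes.sql in this diff replaces it with the
+ --   two-argument form array_append(scopes, (i#>>'{@value}')::text).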
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = 
COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
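+ -- editorial note (not part of the original migration): compared with the earlier extract functions in this
+ -- diff, this bugfix reads the attribute lists from .../propertyNames and .../relationshipNames instead of
+ -- .../properties and .../relationships, which appears to track the renamed RegistrationInfo members; the
+ -- fan-out into csourceinformation rows is otherwise the same as before.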
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ 
b/scorpio-broker/QueryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + e_type text, + e_prop text, + 
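+ -- editorial note (not part of the original migration): each csourceinformation row written by the trigger
+ -- further down carries at most one entity constraint (e_id, e_id_p, e_type) and one attribute name (e_prop
+ -- or e_rel), so a registration fans out into one row per entity/attribute combination; the boolean columns
+ -- appear to mirror, in order, the flags of the registration's operations list.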
e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + 
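+ -- editorial note (not part of the original migration): this migration renames the data column to ENTITY and
+ -- drops the observationspace/operationspace columns above, so the rewritten trigger reads NEW.ENTITY and no
+ -- longer extracts those two geometries.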
NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
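+        -- NGSILD_DELETEATTRIB rebuilds the attribute's instance array without the entry whose datasetId
+        -- matches deleteDatasetId; when no instances remain, the whole attribute key is removed from the
+        -- entity. Note that the jsonb_set path above is the text literal '{attribName}', i.e. the key
+        -- "attribName", rather than the value of the attribName parameter.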
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION 
NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN 
DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- 
Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": 
"geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": 
"ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] 
= true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT EXISTS 
i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": 
"ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": 
"ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20230904.1__fixsubs.sql @@ -0,0 +1 @@ +UPDATE 
SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', entityId, 'old', prev_entity); + End 
IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": 
"ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", 
+ "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + 
"@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + 
"VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = 
newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
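+ -- slots 37-41 cover the entity-map operations whose columns are added above.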
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
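+ -- every registration field is read from the core ngsi-ld IRI first and from the
+ -- default-context IRI as a fallback.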
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
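+ -- for redirect/exclusive registrations (regMode > 1) a conflict is raised if a
+ -- matching local entity already carries the registered property.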
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": 
"urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": "ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", + + 
"VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + "@id": "ngsi-ld:hasLanguageMaps", + + "@container": "@list" 
+ + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": "ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + + 
"tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
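+ -- a per-language value of urn:ngsi-ld:null removes only that language entry;
+ -- other values overwrite the entry in place.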
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
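+ -- merge_has_object merges hasObject arrays element by element; an @id of
+ -- urn:ngsi-ld:null drops the element, and NULL is returned once the array is empty.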
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
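+ -- index walks the new value array, removed counts entries already dropped from
+ -- old_attrib, and arr_idx = index - removed is the position actually written to.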
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
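+-- merge_attrib (descriptive note): expects the expanded attribute as a JSON array of instances. Each new instance is
+-- matched against the existing instances by datasetId (two missing datasetIds also count as a match) and merged via
+-- merge_attrib_instance; unmatched instances are appended. The function returns an object with 'result' (the merged
+-- array or JSON null), plus 'deleted' and 'updated' arrays listing the affected datasetIds (JSON null stands for the
+-- default instance).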
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
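+-- merge_json(a, b) (descriptive note): loads the stored entity with id a (raising 02000 when it does not exist), unions
+-- the @type arrays, merges every other attribute through merge_attrib, persists the result and returns an object with
+-- 'old', 'new', 'deleted' and 'updated'. Example call with a hypothetical entity id and a minimal expanded payload
+-- (values are illustrative only):
+--   SELECT merge_json('urn:ngsi-ld:Vehicle:A4567',
+--     '{"https://uri.etsi.org/ngsi-ld/speed":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"],
+--       "https://uri.etsi.org/ngsi-ld/hasValue":[{"@value":55}]}]}'::jsonb);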
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
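+-- validate_geo_json (descriptive note): dispatches on the expanded GeoJSON @type (Point, LineString, Polygon,
+-- MultiPoint, MultiLineString, MultiPolygon), checks that the coordinates are expanded as nested @list structures and
+-- validates the individual positions via validate_geo_point; malformed coordinates raise SB006 and an unknown geometry
+-- type raises SB007.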
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value -> '@list') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/QueryManager/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/QueryManager/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/controller/EntityOperationsQueryController.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/controller/EntityOperationsQueryController.class new file mode 100644 index 0000000000000000000000000000000000000000..26f7b1a975c189de4abcf5024ebdbd9565cf194c Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/controller/EntityOperationsQueryController.class differ diff --git 
a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.class new file mode 100644 index 0000000000000000000000000000000000000000..cffc66b55dd362b13148c08d8127bedb40f7b8c7 Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.class differ diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingBase.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingBase.class new file mode 100644 index 0000000000000000000000000000000000000000..e1668158e7f282dccbd4c3a8637c08e4c14ee35f Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingBase.class differ diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingByteArray.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..80022217b46fc9d990446d3d0cd692c20cc9c49a Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingByteArray.class differ diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingInMemory.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingInMemory.class new file mode 100644 index 0000000000000000000000000000000000000000..f7082df553927401cc165df18df85bcf483a6cdc Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingInMemory.class differ diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingString.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingString.class new file mode 100644 index 0000000000000000000000000000000000000000..97a25ace81e28a6992346a7f81af0a53a81cb5c4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingString.class differ diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.class new file mode 100644 index 0000000000000000000000000000000000000000..3b859e47a7fc082fe3ac45bc28835e5ed65f21d9 Binary files /dev/null and b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.class differ diff --git a/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/services/QueryService.class b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/services/QueryService.class new file mode 100644 index 0000000000000000000000000000000000000000..8db73ada70c281e7a795953964a9ba4eda5acab0 Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/classes/eu/neclab/ngsildbroker/queryhandler/services/QueryService.class differ diff --git a/scorpio-broker/QueryManager/target/maven-archiver/pom.properties b/scorpio-broker/QueryManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..aa50ea535efbca57b79493eb634ea43c93ad67ff --- /dev/null +++ b/scorpio-broker/QueryManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:27:37 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=query-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..45d22203a3d535f071772ff52523fbc12ca86052 --- /dev/null +++ b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,8 @@ +eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingString.class +eu/neclab/ngsildbroker/queryhandler/services/QueryService.class +eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingByteArray.class +eu/neclab/ngsildbroker/queryhandler/controller/QueryController.class +eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingInMemory.class +eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.class +eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingBase.class +eu/neclab/ngsildbroker/queryhandler/controller/EntityOperationsQueryController.class diff --git a/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..1019ba1626d991fd547bf071789f091858ee0179 --- /dev/null +++ b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,8 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/controller/EntityOperationsQueryController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingInMemory.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/messaging/QueryManagerMessagingString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/repository/QueryDAO.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/main/java/eu/neclab/ngsildbroker/queryhandler/services/QueryService.java diff --git a/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst 
b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..24acdcceb83cde3ebd5218be29af6371a150469a --- /dev/null +++ b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,3 @@ +eu/neclab/ngsildbroker/queryhandler/services/QueryServiceTest.class +eu/neclab/ngsildbroker/queryhandler/controller/CustomProfile.class +eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.class diff --git a/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..6afb5b65caa23c3a28e3971d07b86e354e591f03 --- /dev/null +++ b/scorpio-broker/QueryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,3 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/QueryManager/src/test/java/eu/neclab/ngsildbroker/queryhandler/services/QueryServiceTest.java diff --git a/scorpio-broker/QueryManager/target/quarkus-app/app/query-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/QueryManager/target/quarkus-app/app/query-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..888f8cdc308650d5ef28d79cf059ffd3bc7f63ae Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/app/query-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new 
file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null 
and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and 
b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ 
diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null 
and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 
0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/QueryManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/QueryManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 
+com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 
+io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 
+io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 
+org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/QueryManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/QueryManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..18bd9f2c6a798a495dca5851d72840b6781dbbc1 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/QueryManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..60224bd64676bf003a0191cc69a0d651e7f83720 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/QueryManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..6a2b2801e08a2dd5fba188e3cc58643001592d65 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/QueryManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/QueryManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..240ec57ee8679b58c8296a98ca24578b5b7e64e2 Binary files /dev/null and b/scorpio-broker/QueryManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git 
a/scorpio-broker/QueryManager/target/quarkus-artifact.properties b/scorpio-broker/QueryManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..b837ca211d68c0d37948257a73c4739e2ea5c4d8 --- /dev/null +++ b/scorpio-broker/QueryManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/query-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/QueryManager/target/query-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/QueryManager/target/query-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..4b3eff07d2c780d47d3377b16515bf11dc034ecc Binary files /dev/null and b/scorpio-broker/QueryManager/target/query-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/controller/CustomProfile.class b/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..841f63437854fb7aedaae13bcefb018240aa3ba6 Binary files /dev/null and b/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/controller/CustomProfile.class differ diff --git a/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.class b/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..07a2be7663beb28cdb5ec5576b7cdcb5388a614f Binary files /dev/null and b/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/controller/QueryControllerTest.class differ diff --git a/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/services/QueryServiceTest.class b/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/services/QueryServiceTest.class new file mode 100644 index 0000000000000000000000000000000000000000..5b0c909306ef3c468bbf0c9b0be40434469e4df8 Binary files /dev/null and b/scorpio-broker/QueryManager/target/test-classes/eu/neclab/ngsildbroker/queryhandler/services/QueryServiceTest.class differ diff --git a/scorpio-broker/README.ja.md b/scorpio-broker/README.ja.md new file mode 100644 index 0000000000000000000000000000000000000000..c96804c79cce79d6339dd90d8f20d0560c1b5857 --- /dev/null +++ b/scorpio-broker/README.ja.md @@ -0,0 +1,240 @@ +# Scorpio NGSI-LD Broker + +[![FIWARE Core](https://nexus.lab.fiware.org/static/badges/chapters/core.svg)](https://www.fiware.org/developers/catalogue/) +[![License: BSD-4-Clause](https://img.shields.io/badge/license-BSD%204%20Clause-blue.svg)](https://spdx.org/licenses/BSD-4-Clause.html) +[![Docker](https://img.shields.io/docker/pulls/scorpiobroker/scorpio.svg)](https://hub.docker.com/r/scorpiobroker/scorpio/) +[![fiware](https://nexus.lab.fiware.org/repository/raw/public/badges/stackoverflow/fiware.svg)](https://stackoverflow.com/questions/tagged/fiware) +[![NGSI LD](https://img.shields.io/badge/NGSI-LD-red.svg)](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.02.02_60/gs_CIM009v010202p.pdf) +
+[![Documentation badge](https://img.shields.io/readthedocs/scorpio.svg)](https://scorpio.readthedocs.io/en/latest/?badge=latest)
+![Status](https://nexus.lab.fiware.org/static/badges/statuses/full.svg)
+![Travis-CI](https://travis-ci.org/ScorpioBroker/ScorpioBroker.svg?branch=master)
+
+Scorpio is an NGSI-LD compliant context broker developed by NEC Laboratories Europe and NEC Technologies India. It
+implements the full [NGSI-LD API](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.02.02_60/gs_CIM009v010202p.pdf)
+as specified by the ETSI Industry Specification Group (ETSI ISG) on cross-cutting Context Information Management
+([ETSI ISG CIM](https://www.etsi.org/committee/cim)).
+
+The NGSI-LD API enables the management, access and discovery of context information. Context information consists of
+entities (such as a building) together with their properties (such as address and geographic location) and
+relationships (such as owner). Scorpio therefore allows applications and services to request context information:
+what they need, when they need it and how they need it.
+
+The functionalities of the NGSI-LD API include:
+
+- Creating, updating, appending and deleting context information.
+- Querying context information, including filtering, geographic scoping and paging.
+- Subscribing to changes in context information and receiving asynchronous notifications.
+- Registering and discovering sources of context information, which allows building distributed and federated
+  deployments.
+
+Scorpio is a FIWARE Generic Enabler and can therefore be integrated as part of any platform "Powered by FIWARE".
+FIWARE is a curated framework of open source platform components which can be assembled together with other
+third-party platform components to accelerate the development of Smart Solutions. The roadmap of this FIWARE GE is
+described [here](./docs/roadmap.ja.md).
+
+For more information, check the [FIWARE developers](https://developers.fiware.org/) website and the
+[FIWARE](https://fiware.org/) website. The complete list of FIWARE GEs and Incubated FIWARE GEs can be found in the
+[FIWARE Catalogue](https://catalogue.fiware.org/).
+
+| :books: [Documentation](https://scorpio.rtfd.io/) | :mortar_board: [Academy](https://fiware-academy.readthedocs.io/en/latest/core/scorpio) | :whale: [Docker Hub](https://hub.docker.com/r/scorpiobroker/scorpio/) | :dart: [Roadmap](./docs/roadmap.ja.md) |
+| ------------------------------------------------- | --------------------------------------------------------------------- | --------------------------------------------------------------------- | --------------------------------------------------------------------- |
+
+## Contents
+
+- [Background](#background)
+- [Installation and Building](#installation-and-building)
+- [Usage](#usage)
+- [API Walkthrough](#api-walkthrough)
+- [Tests](#tests)
+- [Further Resources](#further-resources)
+- [Acknowledgements](#acknowledgements)
+- [Credit where credit is due](#credit-where-credit-is-due)
+- [Code of Conduct](#code-of-conduct)
+- [License](#license)
+
+## Background
+
+Scorpio is an NGSI-LD broker that enables the management of and requests for context information. It supports the
+following:
+
+- Context producers can manage context, i.e. create, update, append and delete context information.
+- Context consumers can request the context information they need, discovering relevant entities by identifying them
+  or by providing an entity type, and filtering according to property values, existing relationships and geographic
+  scope provided as GeoJSON features.
+- Two interaction styles are supported: synchronous query/response and asynchronous subscribe/notify, where
+  notifications can be based on changes of properties or relationships, or on fixed time intervals.
+- Scorpio implements the optional NGSI-LD temporal interface for requesting historic information, e.g. the property
+  values measured within a specified time interval.
+- Scorpio supports multiple deployment configurations, including centralized, distributed and federated ones. In
+  addition to the context producers mentioned above, there can be context sources that themselves implement the
+  NGSI-LD interface. These context sources can register themselves with the information they can provide on request
+  (not the information (values) itself). A Scorpio broker in a distributed setup discovers, based on the
+  registrations, the context sources that may have information for answering a request, requests and aggregates the
+  information from the different context sources, and provides it to the requesting context consumer.
+- In a federated setup, the context sources can themselves be NGSI-LD brokers. Federation can be used to combine
+  information from multiple providers that want to (partially) share their information. The important difference
+  typically lies in the granularity of the registrations, e.g. "has information about entities of type building
+  within a geographic area" rather than "has information about building A".
+- Scorpio supports all of the deployment configurations described above and thus offers scalability and the
+  possibility to extend a scenario in an evolutionary way, for example by combining two separate deployments or by
+  using different brokers for scalability reasons. This is fully transparent to context consumers, which can keep
+  using a single access point.
+
+## Installation and Building
+
+Scorpio is developed in Java, using Spring Cloud as the microservice framework and Apache Maven as the build tool. It
+requires Apache Kafka as the message bus and a Postgres database with the PostGIS extension.
+
+Information on how to install the software components required by Scorpio can be found in the
+[installation guide](./docs/ja/source/installationGuide.rst). For building and running Scorpio, see the
+[build and run guide](./docs/ja/source/buildScorpio.rst).
+
+## Usage
+
+By default the broker runs on port 9090 and the base URL for interacting with it is http://localhost:9090/ngsi-ld/v1/.
+
+### A simple example
+
+Generally, you can create an entity by sending an HTTP POST request to http://localhost:9090/ngsi-ld/v1/entities/
+with a payload such as:
+
+```json
+{
+    "id": "urn:ngsi-ld:testunit:123",
+    "type": "AirQualityObserved",
+    "dateObserved": {
+        "type": "Property",
+        "value": {
+            "@type": "DateTime",
+            "@value": "2018-08-07T12:00:00Z"
+        }
+    },
+    "NO2": {
+        "type": "Property",
+        "value": 22,
+        "unitCode": "GP",
+        "accuracy": {
+            "type": "Property",
+            "value": 0.95
+        }
+    },
+    "refPointOfInterest": {
+        "type": "Relationship",
+        "object": "urn:ngsi-ld:PointOfInterest:RZ:MainSquare"
+    },
+    "@context": [
+        "https://schema.lab.fiware.org/ld/context",
+        "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
+    ]
+}
+```
+
+In this example, since the `@context` is in the payload, the `Content-Type` header has to be set to
+`application/ld+json`.
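+
+If you want to try this from the command line, a minimal sketch could look like the following; `entity.jsonld` is a
+hypothetical file name for the payload shown above:
+
+```bash
+# entity.jsonld is assumed to contain the JSON payload shown above.
+curl -X POST 'http://localhost:9090/ngsi-ld/v1/entities/' \
+     -H 'Content-Type: application/ld+json' \
+     -d @entity.jsonld
+```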
+`http://localhost:9090/ngsi-ld/v1/entities/` + +ã¾ãŸã¯ã€æ¬¡ã®ã‚ˆã†ãª GET ã‚’é€ä¿¡ã—ã¦ã‚¯ã‚¨ãƒªã‚’実行ã—ã¾ã™: + +```text +http://localhost:9090/ngsi-ld/v1/entities/?type=Vehicle&limit=2 +Accept: application/ld+json +Link: /aggregatedContext.jsonld>; rel="http://www.w3.org/ns/json-ld#context";type="application/ld+json" +``` + + + +## API ウォークスルー + +Scorpio ãŒæä¾›ã™ã‚‹ NGSI-LD API を使用ã—ã¦å®Ÿè¡Œã§ãã‚‹ã“ã¨ã®è©³ç´°ãªä¾‹ã¯ã€ +[API ウォークスルー](./docs/ja/source/API_walkthrough.rst) ã«ã‚りã¾ã™ã€‚ + + + +## テスト + +Scorpio ã«ã¯2セットã®ãƒ†ã‚¹ãƒˆãŒã‚りã¾ã™ã€‚ユニットテストã«ã¯ JUnit を使用ã—ã€ã‚·ã‚¹ãƒ†ãƒ ãƒ†ã‚¹ãƒˆã«ã¯ npm テストベース㮠+FIWARE NGSI-LD テストスイートを使用ã—ã¾ã™ã€‚テストã®è©³ç´°ã«ã¤ã„ã¦ã¯ã€[テストガイド](./docs/ja/source/testing.rst)ã‚’ +ã”覧ãã ã•ã„。 + + + +## ãã®ä»–ã®ãƒªã‚½ãƒ¼ã‚¹ + +NGSI-L Dã¾ãŸã¯ JSON-LD ã®è©³ç´°ã«ã¤ã„ã¦ã¯ã€æ¬¡ã‚’å‚ç…§ãã ã•ã„: + +- [ETSI NGSI-LD 仕様](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.02.02_60/gs_CIM009v010202p.pdf) +- [ETSI NGSI-LD 入門](https://www.etsi.org/deliver/etsi_gr/CIM/001_099/008/01.01.01_60/gr_CIM008v010101p.pdf) +- [JSON-LD ウェブサイト](https://json-ld.org/) +- [FIWARE Academy Scorpio](https://fiware-academy.readthedocs.io/en/latest/core/scorpio/index.html) + - [FIWARE 601: Introduction to Linked Data](https://fiware-tutorials.readthedocs.io/en/latest/linked-data) + - [FIWARE 602: Linked Data Relationships and Data Models](https://fiware-tutorials.readthedocs.io/en/latest/relationships-linked-data) + +- [FIWARE global summit: The Scorpio NGSI-LD Broker. Features and supported architectures](https://www.slideshare.net/FI-WARE/fiware-global-summit-the-scorpio-ngsild-broker-features-and-supported-architectures) +- [FIWARE global summit: NGSI-LD. An evolution from NGSI V2](https://www.slideshare.net/FI-WARE/fiware-global-summit-ngsild-an-evolution-from-ngsiv2) + +一連ã®ã‚µãƒ³ãƒ—ル呼ã³å‡ºã—ã¯ã€Postman コレクションã¨ã—㦠Examples フォルダーã«ã‚りã¾ã™ã€‚ã“れらã®ä¾‹ã§ã¯2ã¤ã®å¤‰æ•°ã‚’使用ã—㦠+ã„ã¾ã™: + +- gatewayServer 㯠`:` ã§ã‚ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ローカルã§ãƒ‡ãƒ•ォルト設定を使用ã™ã‚‹å ´åˆã¯ã€ + localhost:9090 ã«ãªã‚Šã¾ã™ã€‚ +- link, Link header を介ã—㦠@context ã‚’æä¾›ã™ã‚‹ä¾‹ã§ã™ã€‚例ã§ã¯ã€Example @context をホストã—ã¾ã™ã€‚ + https://raw.githubusercontent.com/ScorpioBroker/ScorpioBroker/master/Examples/index.json ã¸ã®ãƒªãƒ³ã‚¯ã‚’設定ã—ã¾ã™ã€‚ + + + +## è¬è¾ž + +### EU Acknowledgetment + +ã“ã®æ´»å‹•ã¯ã€åŠ©æˆé‡‘契約 No. 731993 (Autopilot), No. 814918 (Fed4IoT)ã€ãŠã‚ˆã³ No. 
## Acknowledgements

### EU Acknowledgement

This activity has received funding from the European Union's Horizon 2020 research and innovation programme under grant agreements No. 731993 (Autopilot), No. 814918 (Fed4IoT), and No. 767498 (MIDIH, Open Call (MoLe)).

- [AUTOPILOT project: Automated driving Progressed by Internet Of Things](https://autopilot-project.eu/)
- [Fed4IoT project](https://fed4iot.org/)
- [MIDIH Project](https://midih.eu/), Open Call (MoLe)


## Credits

Thanks to everyone who has contributed to Scorpio. This applies to the whole Scorpio development team and to all external contributors. For the complete list, see the [CREDITS](./CREDITS) file.


## Code of Conduct

As part of the FIWARE community, we do our best to comply with the [FIWARE Code of Conduct](https://www.fiware.org/foundation/code-of-conduct/) and expect the same from our contributors.

This includes pull requests, issues, comments, code, and comments within code.

As the owners of this repository, we limit communication here strictly to Scorpio and NGSI-LD related topics.

We are all people from different cultural backgrounds, with different habits, customs, and manners, so misunderstandings can happen. To move Scorpio and NGSI-LD forward, we assume that communication is made in good faith, and we expect the same from contributors. However, if someone repeatedly tries to provoke, attack, derail discussions, or ridicule others, we will exercise our rights as hosts of this repository and put a stop to it.

If there is a dispute to be settled, we, as the owners of this repository, have the final word.


## License

Scorpio is licensed under [BSD-4-Clause](https://spdx.org/licenses/BSD-4-Clause.html). This [Contribution license](CONTRIBUTING.ja.md) applies to contributions.

© 2020 NEC Laboratories Europe, NEC Technologies India

diff --git a/scorpio-broker/README.md b/scorpio-broker/README.md new file mode 100644 index 0000000000000000000000000000000000000000..d9e393a7d5321a8c9e0a37ff881575146d895b5d --- /dev/null +++ b/scorpio-broker/README.md @@ -0,0 +1,47 @@

# ScorpioBroker Clone

This repository is a **modified version of [ScorpioBroker](https://github.com/ScorpioBroker/ScorpioBroker)**.
It introduces functionality to **forward temporal queries from source registrations** to another broker or process.

---

## Features

- **Temporal Query Forwarding**
  Allows seamless redirection of temporal queries between brokers or external processes (a sample request is sketched under Additional Resources below).

---

## Getting Started

### Prerequisites
Ensure you have the following installed:
- **Maven**
- **Docker** and **Docker Compose**
- **Java JDK 17**

### Build and Run

Follow these steps to build and run the broker:

1. **Build the project and the docker images with Maven:**
   ```bash
   sudo mvn clean package -DskipTests -Pdocker
   ```

2. **Start the broker using Docker Compose:**
   ```bash
   sudo docker compose -f docker-compose-dist.yml up --build
   ```

3. **Access the broker:**
   The broker is now running on port **9090**.

---

### Additional Resources
For more details about the original project, visit the [ScorpioBroker GitHub repository](https://github.com/ScorpioBroker/ScorpioBroker).
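For orientation, the temporal queries that this fork can forward are requests against the NGSI-LD temporal endpoint. A minimal sketch against a locally running broker might look as follows; the entity type and time values are illustrative, and the exact parameter names depend on the NGSI-LD API version implemented by the broker:

```bash
# Ask for the history of Vehicle entities within a time window.
curl -G 'http://localhost:9090/ngsi-ld/v1/temporal/entities/' \
  --data-urlencode 'type=Vehicle' \
  --data-urlencode 'timerel=between' \
  --data-urlencode 'timeAt=2024-01-01T00:00:00Z' \
  --data-urlencode 'endTimeAt=2024-01-02T00:00:00Z' \
  -H 'Accept: application/ld+json'
```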
+ +Details on how to run a context source and forward the temporal queries can be found [here](https://github.com/efntallaris/scorpioBroker/blob/master/temporal_queries_source_registrations.md) + +--- diff --git a/scorpio-broker/Registry/.gitignore b/scorpio-broker/Registry/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..1f31adb1dcec3b9e26bcfdd4ca40a9050e4840e7 --- /dev/null +++ b/scorpio-broker/Registry/.gitignore @@ -0,0 +1,5 @@ +/target/ +/logs/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/Registry/RegistryManager/.gitignore b/scorpio-broker/Registry/RegistryManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..07b61c96793e8eb9d934412ad6ae24eb1f79a6d0 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/.gitignore @@ -0,0 +1,5 @@ +/target/ +/.settings/ +/logs/ +.classpath +.project diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/csource-expanded.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/csource-expanded.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..ce2a9139105213838517b9e734a765d892192bb2 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/csource-expanded.jsonld @@ -0,0 +1,77 @@ +{ + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/brandName" + }, + { + "@id": "http://example.org/vehicle/speed" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:A456", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + }, + { + "@id": "urn:ngsi-ld:Vehicle:A789", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "http://example.org/common/isParked" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/parking/OffStreetParking" + ], + "https://uri.etsi.org/ngsi-ld/idPattern": [ + { + "@value": ".*downtown$" + } + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ], + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": "NameExample" + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.684783577919006, 49.406131991436396 ] }" + } + ], + "@id": "urn:ngsi-ld:ContextSourceRegistration:csr1a3456", + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] +} \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/csource.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/csource.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..377ce6e86090fefbbeb1906eecaa3e9ad17528e4 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/csource.jsonld @@ -0,0 +1,51 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed", + "isParked":{ + "@type":"@id", + "@id":"http://example.org/common/isParked" + }, + 
"OffStreetParking":"http://example.org/parking/OffStreetParking" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:csr1a3456", + "type": "ContextSourceRegistration", + "name": "NameExample", + "description": "DescriptionExample", + "information": [ + { + "entities": [ + { + "id": "urn:ngsi-ld:Vehicle:A456", + "type": "Vehicle" + }, + { + "id": "urn:ngsi-ld:Vehicle:A789", + "type": "Vehicle" + } + ], + "properties": [ + "brandName", + "speed" + ], + "relationships": [ + "isParked" + ] + }, + { + "entities": [ + { + "idPattern": ".*downtown$", + "type": "OffStreetParking" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026", + "location": "{ \"type\": \"Point\", \"coordinates\": [ 8.684783577919006, 49.406131991436396 ] }", + "expires": "2030-11-29T14:53:15" +} \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/dml-examples.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/dml-examples.sql new file mode 100644 index 0000000000000000000000000000000000000000..4e424d712289103b80e4c64b78e3b138da839021 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/dml-examples.sql @@ -0,0 +1,277 @@ +begin; + +truncate csource cascade; + +-- source: csource-expanded.jsonld +insert into CSource (id, data) values ('urn:ngsi-ld:ContextSourceRegistration:csr1a3456', +' +{ + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/brandName" + }, + { + "@id": "http://example.org/vehicle/speed" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:A456", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + }, + { + "@id": "urn:ngsi-ld:Vehicle:A789", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "http://example.org/common/isParked" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/parking/OffStreetParking" + ], + "https://uri.etsi.org/ngsi-ld/idPattern": [ + { + "@value": ".*downtown$" + } + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ], + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": "NameExample" + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.684783577919006, 49.406131991436396 ] }" + } + ], + "@id": "urn:ngsi-ld:ContextSourceRegistration:csr1a3456", + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] +} +' +); + +insert into CSource (id, data) values ('urn:ngsi-ld:ContextSourceRegistration:csr1a3456_typeonly', +' +{ + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ], + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": 
"NameExample" + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.684783577919006, 49.406131991436396 ] }" + } + ], + "@id": "urn:ngsi-ld:ContextSourceRegistration:csr1a3456_typeonly", + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] +} +' +); + +-- Federation and Geographical csources + +-- Open the file "tests-geo-reg-geojson.io-source.json" in http://geojson.io to graphically see the examples below + +-- Common attributes +select ' + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "https://uri.etsi.org/ngsi-ld/idPattern": [ + { + "@value": ".*" + } + ], + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ], + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": "NameExample" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ] +' AS common_attributes \gset + +\set quoted_common_attributes '\'' :common_attributes '\'' + +-- insert + +-- FedBroker +insert into CSource (id, data) values ('urn:ngsi-ld:TestFed:FedBroker', +('{ + "@id": "urn:ngsi-ld:TestFed:FedBroker", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 8.2342529296875, 49.21759710517596 ], [ 8.957977294921875, 49.21759710517596 ], [ 8.957977294921875, 49.681846899401286 ], [ 8.2342529296875, 49.681846899401286 ], [ 8.2342529296875, 49.21759710517596 ] ] ] }" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://localhost:10001" + } + ], +' || :quoted_common_attributes || ' +}')::jsonb +); + +-- Sinsheim + wald (almost neckar) +insert into CSource (id, data) values ('urn:ngsi-ld:TestFedReg:Broker1', +('{ + "@id": "urn:ngsi-ld:TestFedReg:Broker1", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 8.751983642578125, 49.23463749585336 ], [ 8.93463134765625, 49.23463749585336 ], [ 8.93463134765625, 49.35286116650209 ], [ 8.751983642578125, 49.35286116650209 ], [ 8.751983642578125, 49.23463749585336 ] ] ] }" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://localhost:10002" + } + ], +' || :quoted_common_attributes || ' +}')::jsonb +); + + +-- Worms +insert into CSource (id, data) values ('urn:ngsi-ld:TestFedReg:Broker2', +('{ + "@id": "urn:ngsi-ld:TestFedReg:Broker2", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 8.28643798828125, 49.60715036117516 ], [ 8.399047851562498, 49.60715036117516 ], [ 8.399047851562498, 49.664961282899974 ], [ 8.28643798828125, 49.664961282899974 ], [ 8.28643798828125, 49.60715036117516 ] ] ] }" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://localhost:10003" + } + ], +' || :quoted_common_attributes || ' +}')::jsonb +); + + +-- Heidelberg +insert into CSource (id, data) values ('urn:ngsi-ld:TestFedReg:Broker3', +('{ + "@id": "urn:ngsi-ld:TestFedReg:Broker3", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 8.5968017578125, 49.384160800744986 ], [ 8.76708984375, 49.384160800744986 ], [ 8.76708984375, 49.44134289100633 ], [ 8.5968017578125, 49.44134289100633 ], [ 
8.5968017578125, 49.384160800744986 ] ] ] }" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://localhost:10004" + } + ], +' || :quoted_common_attributes || ' +}')::jsonb +); + + +-- Mannheim + Weinheim + HD + wald near Sinsheim +insert into CSource (id, data) values ('urn:ngsi-ld:TestFedReg:Broker4', +('{ + "@id": "urn:ngsi-ld:TestFedReg:Broker4", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [ [ [ 8.34686279296875, 49.298262740098345 ], [ 8.850860595703125, 49.298262740098345 ], [ 8.850860595703125, 49.55283460376055 ], [ 8.34686279296875, 49.55283460376055 ], [ 8.34686279296875, 49.298262740098345 ] ] ] }" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://localhost:10005" + } + ], +' || :quoted_common_attributes || ' +}')::jsonb +); + +commit; + diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/geojson-io.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/geojson-io.sql new file mode 100644 index 0000000000000000000000000000000000000000..6eb462d5546ee727c71ddafcd3816bb67a2fa14e --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/geojson-io.sql @@ -0,0 +1,16 @@ +\pset pager 0 +\a +\t +\pset recordsep ' , ' + +\echo This SQL script generates a FeatureCollection to graphically visualize all csources geometries (based on "location" attribute) +\echo Just copy the json below and paste at http://geojson.io +\echo ------------------------ +\echo '{ "type": "FeatureCollection", "features": [' +select '{"type": "Feature", + "properties": {}, + "geometry": ' || coalesce((data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}')::text, 'null') || '}' as g + from csource + where data ? 'https://uri.etsi.org/ngsi-ld/location'; + +\echo '] }' \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/how-to.txt b/scorpio-broker/Registry/RegistryManager/db-scripts/how-to.txt new file mode 100644 index 0000000000000000000000000000000000000000..ccf8085b47d9888c4228ec16b80752db3e932f73 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/how-to.txt @@ -0,0 +1,15 @@ +NGB Database how-to for Ubuntu +Tested in Ubuntu 18.04 + +1. Install PostgreSQL 10 +$ sudo apt install postgresql-10 postgresql-client-10 postgresql-server-dev-10 + +2. Install PostGIS +$ sudo apt install postgresql-10-postgis-2.4 postgresql-10-postgis-scripts + +3. Create ngb user +$ sudo su - postgres +$ psql -c "create user ngb superuser createdb password 'ngb'"; + +4. 
Create ngb database +$ psql -c "create database ngb owner = ngb"; \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/all-infotests.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/all-infotests.sql new file mode 100644 index 0000000000000000000000000000000000000000..8f71b2e3f4ea34c1ba9bb4cea10cf89f8d8233f0 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/all-infotests.sql @@ -0,0 +1,320 @@ +begin; + +delete from csource where id like 'urn:ngsi-ld:ContextSourceRegistration:infotest%'; + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_type', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +' +); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_type_id', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_id", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:A456", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +'); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_type_multiple_ids', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_multiple_ids", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:A456", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + }, + { + "@id": "urn:ngsi-ld:Vehicle:A789", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +'); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_type_idPattern', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_idPattern", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "https://uri.etsi.org/ngsi-ld/idPattern": [ + { + "@value": "urn:ngsi-ld:Vehicle.*" + } + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +'); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_type_id_idPattern', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_id_idPattern", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:A456", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "https://uri.etsi.org/ngsi-ld/idPattern": [ + { + "@value": "urn:ngsi-ld:Vehicle.*" + } + ] + } + ] + } + ], + 
"https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +'); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_attributes', +' { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_attributes", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/brandName" + }, + { + "@id": "http://example.org/vehicle/speed" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/fuelType" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "http://example.org/common/isParked" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } + +'); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_entity_type_attributes', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_entity_type_attributes", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/brandName" + }, + { + "@id": "http://example.org/vehicle/speed" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "http://example.org/common/isParked" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +'); + +insert into csource (id, data) values ( +'urn:ngsi-ld:ContextSourceRegistration:infotest_many_elements', +' + { + "@id": "urn:ngsi-ld:ContextSourceRegistration:infotest_many_elements", + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/brandName" + }, + { + "@id": "http://example.org/vehicle/speed" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "http://example.org/common/isParked" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/room/temperature" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/room/Room" + ] + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "http://example.org/vehicle/brandName" + } + ], + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:A456", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] + } +'); + +commit; \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/createCsources.sh b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/createCsources.sh new file mode 100644 index 
0000000000000000000000000000000000000000..04d33a6c599c90660f174ab5ba41fe0478d6ffeb --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/createCsources.sh @@ -0,0 +1,9 @@ +#!/bin/bash +set -o xtrace +for file in ./*.jsonld; do + echo ${file##*/} + curl -X POST -H 'Accept: application/ld+json' -H 'Content-Type: application/json' --data "@${file##*/}" http://localhost:9090/ngsi-ld/v1/csourceRegistrations/ + +done + + diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest1.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest1.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..50bf2769f7107dae80d7ccfd9ad637923daff7c0 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest1.jsonld @@ -0,0 +1,22 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest2.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest2.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..23f919a8f56d63c5a92bea6fde79804e068cdb2a --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest2.jsonld @@ -0,0 +1,23 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_id", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle", + "id": "urn:ngsi-ld:Vehicle:A456" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest2a.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest2a.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..cfe3f5a250015a1a04c18e056b153a5d9e5a9b07 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest2a.jsonld @@ -0,0 +1,27 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_multiple_ids", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle", + "id": "urn:ngsi-ld:Vehicle:A456" + }, + { + "type": "Vehicle", + "id": "urn:ngsi-ld:Vehicle:A789" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest3.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest3.jsonld new file mode 100644 index 
0000000000000000000000000000000000000000..8d3da295462e3639766e9cd6df162a324aeef0e2 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest3.jsonld @@ -0,0 +1,23 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_idPattern", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle", + "idPattern": "urn:ngsi-ld:Vehicle.*" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest4.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest4.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..271554242308c441ca493c027a1f517b17523d7b --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest4.jsonld @@ -0,0 +1,24 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_type_id_idPattern", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle", + "id": "urn:ngsi-ld:Vehicle:A456", + "idPattern": "urn:ngsi-ld:Vehicle.*" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest5.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest5.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..2cffc29c49bf28c9d1c2feb09e0fca03a5d754a5 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest5.jsonld @@ -0,0 +1,36 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed", + "fuelType":"http://example.org/vehicle/fuelType", + "isParked": { + "@type": "@id", + "@id": "http://example.org/common/isParked" + } + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_attributes", + "type": "ContextSourceRegistration", + "information": [ + { + "properties": [ + "brandName", + "speed" + ] + }, + { + "properties": [ + "fuelType" + ] + }, + { + "relationships": [ + "isParked" + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest6.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest6.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..9596daf0f264eaf58ee49934399b49bca156c3b3 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest6.jsonld @@ -0,0 +1,33 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed", + "isParked": { + "@type": "@id", + "@id": 
"http://example.org/common/isParked" + } + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_entity_type_attributes", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle" + } + ], + "properties": [ + "brandName", + "speed" + ], + "relationships": [ + "isParked" + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest7.jsonld b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest7.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..0505cc43fdd31b84a360a6ab1a26fb8e74f5d437 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/csource-infotest7.jsonld @@ -0,0 +1,63 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed", + "isParked": { + "@type": "@id", + "@id": "http://example.org/common/isParked" + }, + "Room":"http://example.org/room/Room", + "temperature":"http://example.org/room/temperature" + } + ], + "id": "urn:ngsi-ld:ContextSourceRegistration:infotest_many_elements", + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Vehicle" + } + ], + "properties": [ + "brandName", + "speed" + ] + }, + { + "entities": [ + { + "type": "Vehicle" + } + ], + "relationships": [ + "isParked" + ] + }, + { + "entities": [ + { + "type": "Room" + } + ], + "properties": [ + "temperature" + ] + }, + { + "entities": [ + { + "type": "Vehicle", + "id": "urn:ngsi-ld:Vehicle:A456" + } + ], + "properties": [ + "brandName" + ] + } + ], + "endpoint": "http://my.csource.org:1026" +} diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/expandCsources.sh b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/expandCsources.sh new file mode 100644 index 0000000000000000000000000000000000000000..51029b9de7cc84a15fbd053da4157f7e8efef031 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/infotests/expandCsources.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -o xtrace + +jsonExpander="python /media/sf_ngb/jsonld-expand.py" + +cat /dev/null > output.txt +for file in ./*.jsonld; do + echo ${file##*/} + ${jsonExpander} ${file##*/} >> output.txt + echo "" >> output.txt +done + + diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/ngsild-geoquery-language-mapping.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/ngsild-geoquery-language-mapping.sql new file mode 100644 index 0000000000000000000000000000000000000000..6329c04de7e30a5a5436f6a4da0c5ab27dcf4625 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/ngsild-geoquery-language-mapping.sql @@ -0,0 +1,117 @@ +\pset pager 0 +-- \set ECHO queries + +-- Query area: +-- { "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] } + +\echo case 1: near min -> expand + not within - distance: 8km +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + NOT ST_Within( + location, + ST_Buffer( + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 
8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326)::geography, + 8000 + )::geometry + ); + +\echo case 2: near max -> expand + intersects - distance: 8km +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + ST_Intersects( + location, + ST_Buffer( + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326)::geography, + 8000 + )::geometry + ); + +\echo case 3: within -> intersects +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + ST_Intersects( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'),4326) + ); + +\echo case 4: contains -> contains +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + ST_Contains( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'),4326) + ); + +\echo case 5: overlaps -> overlaps OR contains +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + ( + ST_Overlaps( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326) + ) OR + ST_Contains( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326) + ) + ); + + +\echo case 6: intersects -> intersects +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + ST_Intersects( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326) + ); + +\echo case 7: equals -> contains +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + ST_Contains( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326) + ); + +\echo case 8: disjoint -> not within +select id + from csource + where + id like 'urn:ngsi-ld:TestFedReg:%' AND + NOT ST_Within( + location, + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 
49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326) + ); + + +-- tests srid and buffer +select + ST_AsGeoJSON( + ST_Buffer( + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326)::geography, + 8000 + ) + ), + st_srid(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }')) as srid_default, + st_srid(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }')::geography) as srid_geography, + st_srid( + ST_Buffer( + ST_SetSRID(ST_GeomFromGeoJSON( '{ "type": "Polygon", "coordinates": [ [ [ 8.400421142578125, 49.32333182991094 ], [ 8.812408447265625, 49.32333182991094 ], [ 8.812408447265625, 49.49489061140408 ], [ 8.400421142578125, 49.49489061140408 ], [ 8.400421142578125, 49.32333182991094 ] ] ] }'), 4326)::geography, + 8000 + ) + ) as srid_buffer_geography + ; diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/ngsild-query-language-mapping.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/ngsild-query-language-mapping.sql new file mode 100644 index 0000000000000000000000000000000000000000..5b38849d52bf997b55fdb52ceacd1a6a0025ec9e --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/ngsild-query-language-mapping.sql @@ -0,0 +1,253 @@ +\pset pager 0 +-- \set ECHO queries + +/* + +Section 5.10 + +General rule 1) If present, the entity specification in the query consisting of a combination of entity type and +entity id/entity id pattern matches an EntityInfo specified in a RegistrationInfo of the information property in a +context source registration. ***If there is no EntityInfo specified in the RegistrationInfo, the entity specification +is considered matching.*** + +General rule 2) If present, at least one Attribute name specified in the query matches one Property or Relationship in +the RegistrationInfo element of the information property in a context source registration. ***If no Properties or Relationships +are specified in the RegistrationInfo, the Attribute names are considered matching.*** + + +Section 5.12 +An Entity specification consisting of Entity Types, Entity identifiers and id pattern matches an EntityInfo element +if one of the specified Entity Types matches the entity type in the EntityInfo element and one of the following conditions holds +(only in case an id or idPattern is specified in the query [included after a discussion with Martin in Feb. 2019]): +a) The EntityInfo contains neither an id nor an idPattern. +b) One of the specified entity identifiers matches the id in the EntityInfo. +c) At least one of the specified entity identifiers matches the idPattern in the EntityInfo. +d) The specified id pattern matches the id in the EntityInfo. 
+e) Both a specified id pattern and an idPattern in the Entity Info are present (since in the general case it is not easily feasible to determine if there can be identifiers matching both patterns). + +Attribute names match the combination of Properties and Relationships if one of the following conditions hold: +f) No Attribute names have been specified (as this means all Attributes are requested). +g) The combination of Properties and Relationships is empty (as this means only Entities have been registered and the Context Sources may have matching Property or Relationship instances). +h) If at least one of the specified attribute names matches a Property or Relationship specified in the RegistrationInfo. +*/ + +\echo ****** Filters by TYPE + (id/idPattern) + +\echo *** Case 1) +\echo type=http://example.org/vehicle/Vehicle +\echo General rule 1 + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_attrs_only) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle'); + +\echo type=http://example.org/vehicle/Vehicle,http://example.org/bus/Bus +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_attrs_only) OR + (ci.entity_type IN ('http://example.org/vehicle/Vehicle', 'http://example.org/bus/Bus')); + + +\echo *** Case 2) +\echo type=http://example.org/vehicle/Vehicle&id=urn:ngsi-ld:Vehicle:A456 +\echo General rule 1 +\echo a) The EntityInfo contains neither an id nor an idPattern +\echo b) One of the specified entity identifiers matches the id in the EntityInfo. +\echo c) At least one of the specified entity identifiers matches the idPattern in the EntityInfo. + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_attrs_only) OR + ( + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + (ci.entity_id = 'urn:ngsi-ld:Vehicle:A456' OR + 'urn:ngsi-ld:Vehicle:A456' ~ ci.entity_idPattern) + ) + ); + +\echo type=http://example.org/vehicle/Vehicle&id=urn:ngsi-ld:Vehicle:A789 +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_attrs_only) OR + ( + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + (ci.entity_id = 'urn:ngsi-ld:Vehicle:A789' OR + 'urn:ngsi-ld:Vehicle:A789' ~ ci.entity_idPattern) + ) + ); + +\echo type=http://example.org/vehicle/Vehicle&id=urn:ngsi-ld:Vehicle:A456,urn:ngsi-ld:Vehicle:A789 +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_attrs_only) OR + ( + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + (ci.entity_id IN ('urn:ngsi-ld:Vehicle:A456', 'urn:ngsi-ld:Vehicle:A789') OR + ('urn:ngsi-ld:Vehicle:A456' ~ ci.entity_idPattern OR + 'urn:ngsi-ld:Vehicle:A789' ~ ci.entity_idPattern) + ) + ) + ); + + +\echo *** Case 3) +\echo type=http://example.org/vehicle/Vehicle&idPattern=urn:ngsi-ld:Vehi.* +\echo General rule 1 +\echo a) The EntityInfo contains neither an id nor an idPattern +\echo d) The 
specified id pattern matches the id in the EntityInfo. +\echo e) Both a specified id pattern and an idPattern in the Entity Info are present (since in the general case it is not easily feasible to determine if there can be identifiers matching both patterns). + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_attrs_only) OR + ( + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + (ci.entity_id ~ 'urn:ngsi-ld:Vehi.*' OR + ci.entity_idpattern ~ 'urn:ngsi-ld:Vehi.*') + ) + ); + +\echo *** Case 4) +\echo type=http://example.org/vehicle/Vehicle&id=urn:ngsi-ld:Vehicle:A456&idPattern=urn:ngsi-ld:Vehi.* + +\echo on this case, idPattern must be ignored. spec says id takes precedence over idPattern. +\echo thus, this case is the same as case 2. + + +\echo ****** Filters by ATTRIBUTE + + +\echo *** Case 5) +\echo attrs=http://example.org/vehicle/brandName +\echo General rule 2 +\echo h) If at least one of the specified attribute names matches a Property or Relationshiop in the RegistrationInfo + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (ci.property_id = 'http://example.org/vehicle/brandName' OR + ci.relationship_id = 'http://example.org/vehicle/brandName'); + +/* *** We had an internal discussion and decided to not match RegistrationInfos with EntityInfo only: +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (c.has_registrationinfo_with_entityinfo_only) OR + (ci.property_id = 'http://example.org/vehicle/brandName' OR + ci.relationship_id = 'http://example.org/vehicle/brandName');*/ + +\echo attrs=http://example.org/vehicle/brandName,http://example.org/common/isParked +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (ci.property_id IN ('http://example.org/vehicle/brandName', 'http://example.org/common/isParked') OR + ci.relationship_id IN ('http://example.org/vehicle/brandName', 'http://example.org/common/isParked')); + + +\echo ****** Filters by TYPE + (id/idPattern) + ATTRIBUTE + +\echo *** Case 6) +\echo type=http://example.org/vehicle/Vehicle&attrs=http://example.org/vehicle/brandName +\echo General rule 1 +\echo General rule 2 +\echo g) The combination of Properties and Relationships is empty +\echo h) If at least one of the specified attribute names matches a Property or Relationshiop in the RegistrationInfo + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE (ci.entity_type = 'http://example.org/vehicle/Vehicle') + + AND + + ( + NOT EXISTS (SELECT 1 FROM csourceinformation ci2 + WHERE ci2.group_id = ci.group_id AND + (ci2.property_id IS NOT NULL OR ci2.relationship_id IS NOT NULL)) + OR + EXISTS (SELECT 1 FROM csourceinformation ci3 + WHERE ci3.group_id = ci.group_id AND + (ci3.property_id = 'http://example.org/vehicle/brandName' OR + ci3.relationship_id = 'http://example.org/vehicle/brandName')) + ); + + +\echo *** Case 7) +\echo type=http://example.org/vehicle/Vehicle&id=urn:ngsi-ld:Vehicle:A456&attrs=http://example.org/vehicle/brandName +\echo General rule 1 +\echo General rule 2 +\echo a) The EntityInfo contains neither an id nor an idPattern +\echo b) One of the specified entity identifiers matches the id in the EntityInfo. 
+\echo c) At least one of the specified entity identifiers matches the idPattern in the EntityInfo. +\echo g) The combination of Properties and Relationships is empty +\echo h) If at least one of the specified attribute names matches a Property or Relationshiop in the RegistrationInfo + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE ( + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + (ci.entity_id = 'urn:ngsi-ld:Vehicle:A456' OR + 'urn:ngsi-ld:Vehicle:A456' ~ ci.entity_idPattern) + ) + ) + + AND + + ( + NOT EXISTS (SELECT 1 FROM csourceinformation ci2 + WHERE ci2.group_id = ci.group_id AND + (ci2.property_id IS NOT NULL OR ci2.relationship_id IS NOT NULL)) + OR + EXISTS (SELECT 1 FROM csourceinformation ci3 + WHERE ci3.group_id = ci.group_id AND + (ci3.property_id = 'http://example.org/vehicle/brandName' OR + ci3.relationship_id = 'http://example.org/vehicle/brandName')) + ); + + +\echo *** Case 8) +\echo type=http://example.org/vehicle/Vehicle&idPattern=urn:ngsi-ld:Vehi.*&attrs=http://example.org/vehicle/brandName +\echo General rule 1 +\echo General rule 2 +\echo a) The EntityInfo contains neither an id nor an idPattern +\echo d) The specified id pattern matches the id in the EntityInfo. +\echo e) Both a specified id pattern and an idPattern in the Entity Info are present (since in the general case it is not easily feasible to determine if there can be identifiers matching both patterns). +\echo g) The combination of Properties and Relationships is empty +\echo h) If at least one of the specified attribute names matches a Property or Relationshiop in the RegistrationInfo + +SELECT DISTINCT c.id FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE ( + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL) OR + (ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + (ci.entity_id ~ 'urn:ngsi-ld:Vehi.*' OR + ci.entity_idpattern ~ 'urn:ngsi-ld:Vehi.*') + ) + ) + + AND + + ( + NOT EXISTS (SELECT 1 FROM csourceinformation ci2 + WHERE ci2.group_id = ci.group_id AND + (ci2.property_id IS NOT NULL OR ci2.relationship_id IS NOT NULL)) + OR + EXISTS (SELECT 1 FROM csourceinformation ci3 + WHERE ci3.group_id = ci.group_id AND + (ci3.property_id = 'http://example.org/vehicle/brandName' OR + ci3.relationship_id = 'http://example.org/vehicle/brandName')) + ); diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/test-performance-insert.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/test-performance-insert.sql new file mode 100644 index 0000000000000000000000000000000000000000..2f80d3fd01f5f97d0937c99ba1109bc7fddf8e38 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/test-performance-insert.sql @@ -0,0 +1,206 @@ +\pset pager 0 +\timing + +\echo Cleaning benchmark data... +-- delete from csource where id like 'urn:ngsi-ld:ContextSourceRegistration:AP%'; +truncate csource cascade; + +\echo Inserting data... 
+-- source: csource-expanded.jsonld +insert into csource (id, data) +select +'urn:ngsi-ld:ContextSourceRegistration:AP' || i, +( +' +{ + "@id": "urn:ngsi-ld:ContextSourceRegistration:AP' || i || '", + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": "NameExample' || i* trunc(100*random()) || '" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Vehicle:AP' || trunc(10000*random()) || '", + "@type": [ + "http://example.org/vehicle/Vehicle" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "https://json-ld.org/playground/brandName" + }, + { + "@id": "https://json-ld.org/playground/speed" + } + ], + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "https://json-ld.org/playground/isParked" + } + ] + }, + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "https://uri.etsi.org/ngsi-ld/idPattern": [ + { + "@value": ".*downtown$" + } + ], + "@type": [ + "http://example.org/parking/OffStreetParking" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] +} +' +)::jsonb + FROM generate_series(1, 999995) i; + +insert into csource (id, data) +select +'urn:ngsi-ld:ContextSourceRegistration:AP' || i, +( +' +{ + "@id": "urn:ngsi-ld:ContextSourceRegistration:AP' || i || '", + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": "NameExample' || i* trunc(100*random()) || '" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@id": "urn:ngsi-ld:Drone:AP' || trunc(10000*random()) || '", + "@type": [ + "http://example.org/drone/Drone" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/properties": [ + { + "@id": "https://json-ld.org/playground/modelName" + }, + { + "@id": "https://json-ld.org/playground/speed" + } + ], + "https://uri.etsi.org/ngsi-ld/relationships": [ + { + "@id": "https://json-ld.org/playground/isFlying" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": 
[[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] +} +' +)::jsonb + FROM generate_series(999996, 999999) i; + +insert into csource (id, data) +select +'urn:ngsi-ld:ContextSourceRegistration:AP' || i, +( +' +{ + "@id": "urn:ngsi-ld:ContextSourceRegistration:AP' || i || '", + "https://uri.etsi.org/ngsi-ld/name": [ + { + "@value": "NameExample' || i* trunc(100*random()) || '" + } + ], + "https://uri.etsi.org/ngsi-ld/description": [ + { + "@value": "DescriptionExample" + } + ], + "https://uri.etsi.org/ngsi-ld/endpoint": [ + { + "@value": "http://my.csource.org:1026" + } + ], + "https://uri.etsi.org/ngsi-ld/expires": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2030-11-29T14:53:15" + } + ], + "https://uri.etsi.org/ngsi-ld/information": [ + { + "https://uri.etsi.org/ngsi-ld/entities": [ + { + "@type": [ + "http://example.org/bus/Bus" + ] + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/ContextSourceRegistration" + ] +} +' +)::jsonb + FROM generate_series(1000000, 1000999) i; \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/test-performance-queries.sql b/scorpio-broker/Registry/RegistryManager/db-scripts/test-performance-queries.sql new file mode 100644 index 0000000000000000000000000000000000000000..3642110aa831850f9b91b934c8d09370a075b964 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/test-performance-queries.sql @@ -0,0 +1,36 @@ +\pset pager 0 +\set ECHO queries +\timing + +-- performance evaluation: IN vs JOIN + +EXPLAIN ANALYZE +SELECT data FROM csource +WHERE id IN (SELECT csource_id from csourceinformation + WHERE entity_type = 'http://example.org/vehicle/Vehicle' AND + entity_id = 'urn:ngsi-ld:Vehicle:AP3000'); +-- 108 ms + +EXPLAIN ANALYZE +SELECT c.data FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE ci.entity_type = 'http://example.org/vehicle/Vehicle' AND + ci.entity_id = 'urn:ngsi-ld:Vehicle:AP3000'; +-- 3 ms + +EXPLAIN ANALYZE +SELECT c.data FROM csource c +INNER JOIN csourceinformation ci ON (ci.csource_id = c.id) +WHERE ci.entity_type = 'http://example.org/bus/Bus' AND + ci.entity_id IS NULL AND + ci.entity_idpattern IS NULL; +-- 25ms + +-- discarded. execution plan was exactly the same as "id in (..." approach. 
+/*EXPLAIN ANALYZE +SELECT c.data FROM csource c +WHERE EXISTS (SELECT csource_id from csourceinformation + WHERE entity_type = 'http://example.org/vehicle/Vehicle' AND + entity_id = 'urn:ngsi-ld:Vehicle:AP1000' AND + csource_id = c.id); +*/ \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/db-scripts/tests-geo-reg-geojson.io-source.json b/scorpio-broker/Registry/RegistryManager/db-scripts/tests-geo-reg-geojson.io-source.json new file mode 100644 index 0000000000000000000000000000000000000000..b2bff509681dc5f215d717fc8ced74ccbaa8db7b --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/db-scripts/tests-geo-reg-geojson.io-source.json @@ -0,0 +1,221 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": {}, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.2342529296875, + 49.21759710517596 + ], + [ + 8.957977294921875, + 49.21759710517596 + ], + [ + 8.957977294921875, + 49.681846899401286 + ], + [ + 8.2342529296875, + 49.681846899401286 + ], + [ + 8.2342529296875, + 49.21759710517596 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "stroke": "#555555", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "yellow", + "fill-opacity": 0.5 + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.751983642578125, + 49.23463749585336 + ], + [ + 8.93463134765625, + 49.23463749585336 + ], + [ + 8.93463134765625, + 49.35286116650209 + ], + [ + 8.751983642578125, + 49.35286116650209 + ], + [ + 8.751983642578125, + 49.23463749585336 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "stroke": "#555555", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "orange", + "fill-opacity": 0.5 + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.28643798828125, + 49.60715036117516 + ], + [ + 8.399047851562498, + 49.60715036117516 + ], + [ + 8.399047851562498, + 49.664961282899974 + ], + [ + 8.28643798828125, + 49.664961282899974 + ], + [ + 8.28643798828125, + 49.60715036117516 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "stroke": "#555555", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "cyan", + "fill-opacity": 0.5 + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.5968017578125, + 49.384160800744986 + ], + [ + 8.76708984375, + 49.384160800744986 + ], + [ + 8.76708984375, + 49.44134289100633 + ], + [ + 8.5968017578125, + 49.44134289100633 + ], + [ + 8.5968017578125, + 49.384160800744986 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "stroke": "#555555", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "purple", + "fill-opacity": 0.5 + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.34686279296875, + 49.298262740098345 + ], + [ + 8.850860595703125, + 49.298262740098345 + ], + [ + 8.850860595703125, + 49.55283460376055 + ], + [ + 8.34686279296875, + 49.55283460376055 + ], + [ + 8.34686279296875, + 49.298262740098345 + ] + ] + ] + } + }, + { + "type": "Feature", + "properties": { + "stroke": "#555555", + "stroke-width": 2, + "stroke-opacity": 1, + "fill": "#00aa09", + "fill-opacity": 0.5 + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 8.400421142578125, + 49.32333182991094 + ], + [ + 8.812408447265625, + 49.32333182991094 + ], + [ + 8.812408447265625, + 49.49489061140408 + ], + [ + 8.400421142578125, + 49.49489061140408 + ], + [ + 8.400421142578125, + 49.32333182991094 + ] + ] + ] + } + } + ] +} \ No newline at end of file diff 
--git a/scorpio-broker/Registry/RegistryManager/dockerfile4maven b/scorpio-broker/Registry/RegistryManager/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/Registry/RegistryManager/pom.xml b/scorpio-broker/Registry/RegistryManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..4aa3c3515ff08e4e91666999e3371d54dbd1a5a8 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/pom.xml @@ -0,0 +1,90 @@ + + 4.0.0 + RegistryManager + jar + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + 1.0.0-SNAPSHOT + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + eu.neclab.ngsildbroker + Commons + 1.0.0-SNAPSHOT + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.postgresql + postgresql + + + + + org.locationtech.spatial4j + spatial4j + 0.7 + + + + org.locationtech.jts + jts-core + 1.15.1 + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.postgresql + postgresql + + + + diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/RegistryHandler.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/RegistryHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..621e2cc0c70c3e32439620a595b1727e83129d30 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/RegistryHandler.java @@ -0,0 +1,81 @@ +package eu.neclab.ngsildbroker.registryhandler; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.stream.annotation.EnableBinding; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Import; +import org.springframework.http.client.ClientHttpRequestFactory; +import org.springframework.http.client.HttpComponentsClientHttpRequestFactory; +import org.springframework.web.client.RestTemplate; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.ldcontext.AtContextProducerChannel; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; +import eu.neclab.ngsildbroker.commons.securityConfig.SecurityConfig; +import eu.neclab.ngsildbroker.commons.stream.service.CommonKafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import 
eu.neclab.ngsildbroker.commons.swaggerConfig.SwaggerConfigDetails; +import eu.neclab.ngsildbroker.registryhandler.config.CSourceProducerChannel; + +//@Component(immediate=true) +@SpringBootApplication +@EnableBinding({ CSourceProducerChannel.class, AtContextProducerChannel.class }) +@Import({CommonKafkaConfig.class, SwaggerConfigDetails.class}) +public class RegistryHandler { + + public static void main(String[] args) { + SpringApplication.run(RegistryHandler.class); + } + + @Bean("rmops") + KafkaOps ops() { + return new KafkaOps(); + } + + @Bean("rmconRes") + ContextResolverBasic conRes() { + return new ContextResolverBasic(); + } + + @Bean("rmrestTemplate") + RestTemplate restTemplate() { + return new RestTemplate(clientHttpRequestFactory()); + } + + //rest template timeout configs + private ClientHttpRequestFactory clientHttpRequestFactory() { + HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory(); + factory.setReadTimeout(10000); + factory.setConnectTimeout(10000); + return factory; + } + + @Bean("rmgetCsourceRegistration") + CSourceRegistration getCsourceRegistration() { + return new CSourceRegistration(); + } + + @Bean("rmsecurityConfig") + SecurityConfig securityConfig() { + return new SecurityConfig(); + } + + @Bean("rmresourceConfigDetails") + ResourceConfigDetails resourceConfigDetails() { + return new ResourceConfigDetails(); + } + @Bean("rmqueryParser") + QueryParser queryParser() { + return new QueryParser(); + } + @Bean("rmparamsResolver") + ParamsResolver paramsResolver() { + return new ParamsResolver(); + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/CSourceProducerChannel.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/CSourceProducerChannel.java new file mode 100644 index 0000000000000000000000000000000000000000..53a122793db347637c1b208e5725802ed2f704ed --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/CSourceProducerChannel.java @@ -0,0 +1,22 @@ +package eu.neclab.ngsildbroker.registryhandler.config; + +import org.springframework.cloud.stream.annotation.Output; +import org.springframework.messaging.MessageChannel; + +import eu.neclab.ngsildbroker.commons.stream.interfaces.IProducerChannels; + +public interface CSourceProducerChannel extends IProducerChannels { + public String csourceWriteChannel = "CSOURCE_REGISTRATION_WRITE_CHANNEL"; + public String csourceSubscriptionWriteChannel = "CSOURCE_SUBSCRIPTION_WRITE_CHANNEL"; + public String csourceNotificationWriteChannel = "CSOURCE_NOTIFICATION_WRITE_CHANNEL"; + + @Output(csourceWriteChannel) + MessageChannel csourceWriteChannel(); + + @Output(csourceSubscriptionWriteChannel) + MessageChannel csourceSubscriptionWriteChannel(); + + @Output(csourceNotificationWriteChannel) + MessageChannel csourceNotificationWriteChannel(); + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/RegistryManagerResourceConfigurer.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/RegistryManagerResourceConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..7dd4a328c5d74a648e7f507df5bfdda5d6bfd636 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/RegistryManagerResourceConfigurer.java @@ 
-0,0 +1,27 @@ +package eu.neclab.ngsildbroker.registryhandler.config; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer; +import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter; +import eu.neclab.ngsildbroker.commons.securityConfig.ResourceConfigDetails; + +/** + * REST API Resource Server. + */ +@Configuration +@EnableWebSecurity +@EnableResourceServer +@EnableGlobalMethodSecurity(prePostEnabled = true) // Allow method annotations like @PreAuthorize +public class RegistryManagerResourceConfigurer extends ResourceServerConfigurerAdapter { + @Autowired + private ResourceConfigDetails resourceConfigDetails; + + @Override + public void configure(HttpSecurity http) throws Exception { + resourceConfigDetails.ngbSecurityConfig(http); + } +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/StartupConfig.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/StartupConfig.java new file mode 100644 index 0000000000000000000000000000000000000000..55a60ddc4a00effb26e9f74b72d7189a38b561c5 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/config/StartupConfig.java @@ -0,0 +1,240 @@ +package eu.neclab.ngsildbroker.registryhandler.config; + +import java.net.URI; +import java.net.URISyntaxException; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import javax.annotation.PostConstruct; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.stereotype.Component; +import org.springframework.stereotype.Service; +import org.springframework.web.client.HttpClientErrorException; +import org.springframework.web.client.HttpServerErrorException; +import org.springframework.web.client.RestTemplate; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.netflix.discovery.EurekaClient; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Information; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.MicroServiceUtils; +import eu.neclab.ngsildbroker.registryhandler.repository.CSourceDAO; + +@Service +public class StartupConfig { + + @Autowired + @Qualifier("rmrestTemplate") + RestTemplate restTemplate; + @Autowired + EurekaClient eurekaClient; + @Value("${broker.id:#{null}}") + String id; + @Value("${broker.geoCoverage:#{null}}") + String geom; 
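+	// broker.reginfo, when configured, is used verbatim as the registration information
+	// and overrides the entity-type list that getCSInformationNode() derives from the database.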
+ @Value("${broker.reginfo:#{null}}") + String reginfo; + @Value("${broker.parent.location.url:SELF}") + String parentUrl; + @Value("${broker.customEndpoint:#{null}}") + String customEndpoint; + @Value("${broker.regOnlyLocal:#{false}}") + boolean localOnlyAutoReg; + @Autowired + KafkaOps operations; + @Value("${csource.source.topic}") + String CSOURCE_TOPIC; + @Autowired + ObjectMapper objectMapper; + @Autowired + CSourceDAO cSourceDAO; + boolean registered = false; + private String currentRegistration = null; + // String s="\"type\": \"Polygon\",\"coordinates\": [[[100.0, 0.0],[101.0, + // 0.0],[101.0, 1.0],[100.0, 1.0],[100.0, 0.0] ] ]"; + + private final static Logger logger = LoggerFactory.getLogger(StartupConfig.class); + + @PostConstruct + public void init() { + + logger.debug("registering broker with parent :: " + parentUrl); + // abort registration in case of fedration broker (SELF) + if ("SELF".equalsIgnoreCase(parentUrl)) { + logger.debug("Parent Broker settings detected abort registration."); + return; + } + if (parentUrl == null || geom == null) { + logger.error("registration with parent falied : no endpoint and geom specified "); + return; + } + try { + new URI(id); + } catch (URISyntaxException e1) { + logger.error("aborting registration. your id has to be a uri"); + return; + } + String endpoint; + if (customEndpoint != null && !customEndpoint.isEmpty()) { + logger.info("using custom endpoint " + customEndpoint); + endpoint = customEndpoint; + } else { + // TODO this has to be changed because the registry manager just straight up + // crashes if the gateway is not up yet + try { + endpoint = MicroServiceUtils.getResourceURL(eurekaClient, ""); + } catch (Exception e) { + logger.error( + "Failed to retrieve endpoint url. Please make sure that the gateway is running or provide a customEndpoint entry"); + return; + } + } + if (!parentUrl.endsWith("/")) { + parentUrl += "/"; + } + URI parentUri; + URI parentPatchUri; + try { + parentUri = new URI(parentUrl); + parentPatchUri = new URI(parentUrl + id); + } catch (URISyntaxException e1) { + logger.error("your parentUrl is not a valid uri"); + return; + } + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + String payload = getPayload(endpoint); + HttpEntity entity = new HttpEntity(payload, headers); + try { + // set headers + logger.info("registering with fed broker " + parentUrl); + logger.info("payload ::" + payload); + + // call + restTemplate.postForObject(parentUri, entity, String.class); + registered = true; + logger.debug("Broker registered with parent at :" + parentUrl); + } catch (HttpClientErrorException | HttpServerErrorException httpClientOrServerExc) { + logger.error("status code::" + httpClientOrServerExc.getStatusCode()); + logger.error("Message::" + httpClientOrServerExc.getMessage()); + if (HttpStatus.INTERNAL_SERVER_ERROR.equals(httpClientOrServerExc.getStatusCode())) { + logger.error("Broker registration failed due to parent broker."); + } + if (HttpStatus.CONFLICT.equals(httpClientOrServerExc.getStatusCode())) { + logger.debug("Broker already registered with parent. 
Attempting patch"); + try { + restTemplate.patchForObject(parentPatchUri, entity, String.class); + } catch (Exception e) { + logger.error("patching failed"); + } + } + } catch (Exception e) { + logger.error("failed to register with parent completly", e); + } + + } + + // minimum payload for csource registration + private String getPayload(String endpoint) { + // @formatter:off + return "{\r\n" + " \"id\": \"" + id + "\",\r\n" + " \"type\": \"ContextSourceRegistration\",\r\n" + + " \"information\": " + getCSInformationNode() + ",\r\n" + " \"endpoint\": \"" + endpoint + "\",\r\n" + + " \"location\": \"" + geom + "\",\r\n" + " \"timestamp\": {\r\n" + " \"start\": \"" + + LocalDateTime.now() + "\"\r\n" + " }\r\n" + "}"; + // @formatter:on + } + + public void handleUpdatedTypesForFed() { + if(!registered) { + return; + } + // don't know a better way. wait a moment for the database to actually change. + try { + Thread.sleep(200); + } catch (InterruptedException e) { + // unchanged intentional + e.printStackTrace(); + } + String current = this.currentRegistration; + if (!current.equals(getCSInformationNode())) { + String payload = "{\"information\": " + getCSInformationNode() + "}"; + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + HttpEntity entity = new HttpEntity(payload, headers); + try { + URI parentPatchUri = new URI(parentUrl + id); + logger.debug("payload ::" + payload); + // call + restTemplate.patchForObject(parentPatchUri, entity, String.class); + logger.debug("Broker registered with parent at :" + parentUrl); + } catch (HttpClientErrorException | HttpServerErrorException httpClientOrServerExc) { + logger.error("status code::" + httpClientOrServerExc.getStatusCode()); + logger.error("Message::" + httpClientOrServerExc.getMessage()); + } catch (Exception e) { + logger.error("failed to update registery with parent completly", e.getMessage()); + } + + } + } + + private String getCSInformationNode() { + String resultString = null; + if (reginfo != null) { + return reginfo; + } + List types; + if (localOnlyAutoReg) { + types = cSourceDAO.getLocalTypes(); + } else { + types = cSourceDAO.getAllTypes(); + } + if (types==null || types.isEmpty()) { + resultString = "[]"; + } else { + StringBuilder result = new StringBuilder("[{\"entities\": ["); + + for (String type : types) { + result.append("{\"type\": \"" + type + "\"},"); + } + result.deleteCharAt(result.length() - 1); + result.append("]}]"); + resultString = result.toString(); + } + this.currentRegistration = resultString; + return resultString; + + // + // Map records = operations.pullFromKafka(this.CSOURCE_TOPIC); + // // @formatter:off + // Map streamRecords = records.entrySet().stream() + // .collect(Collectors.toMap(Map.Entry::getKey, e -> new String(e.getValue()))); + // // @formatter:on + // return this.getInformationNode(streamRecords); + } + + private String getInformationNode(Map records) throws URISyntaxException { + logger.trace("getCSourceRegistrationFromJson() :: started"); + List information = new ArrayList(); + for (String s : records.keySet()) { + CSourceRegistration cSource = DataSerializer.getCSourceRegistration(records.get(s)); + information.addAll(cSource.getInformation()); + } + String informationString = DataSerializer.toJson(information); + return informationString; + } +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.java 
b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.java new file mode 100644 index 0000000000000000000000000000000000000000..c22bbc933e5288c816c7c878377cf6649ca760c3 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.java @@ -0,0 +1,239 @@ +package eu.neclab.ngsildbroker.registryhandler.controller; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import javax.annotation.PostConstruct; +import javax.servlet.http.HttpServletRequest; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.databind.node.TextNode; +import com.netflix.discovery.EurekaClient; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.ngsiqueries.ParamsResolver; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.registryhandler.repository.CSourceDAO; +import eu.neclab.ngsildbroker.registryhandler.service.CSourceService; + +/** + * + * @version 1.0 + * @date 20-Jul-2018 + */ +@RestController +@RequestMapping("/ngsi-ld/v1/csourceRegistrations") +public class RegistryController { + private final static Logger logger = LoggerFactory.getLogger(RegistryController.class); + private final static String MY_REQUEST_MAPPING = "/ngsi-ld/v1/csourceRegistrations"; + private final static String MY_REQUEST_MAPPING_ALT = "/ngsi-ld/v1/csourceRegistrations/"; + + @Autowired + EurekaClient eurekaClient; + @Autowired + CSourceService csourceService; + @Autowired + @Qualifier("rmconRes") + ContextResolverBasic contextResolver; + @Autowired + @Qualifier("rmparamsResolver") + ParamsResolver paramsResolver; + @Autowired + CSourceDAO csourceDAO; + @Autowired + 
ObjectMapper objectMapper; + private HttpUtils httpUtils; + + @PostConstruct + private void setup() { + this.httpUtils = HttpUtils.getInstance(contextResolver); + } + + // @GetMapping + // public ResponseEntity discoverCSource(HttpServletRequest request, + // @RequestParam HashMap queryMap) { + // try { + // return ResponseEntity.status(HttpStatus.OK) + // .body(csourceService.getCSourceRegistrations(queryMap)); + // } catch (ResponseException exception) { + // return ResponseEntity.status(exception.getHttpStatus()).body(new + // RestResponse(exception)); + // } catch (Exception e) { + // return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + // .body(new RestResponse(HttpStatus.INTERNAL_SERVER_ERROR, "Internal server + // error", + // "Internal error"))); + // } + // } + + @GetMapping + public ResponseEntity discoverCSource(HttpServletRequest request, + @RequestParam HashMap queryMap) { + try { + logger.trace("getCSources() ::"); + String queryParams = request.getQueryString(); + if ((request.getRequestURI().equals(MY_REQUEST_MAPPING) + || request.getRequestURI().equals(MY_REQUEST_MAPPING_ALT)) && queryParams != null) { + + List linkHeaders = HttpUtils.parseLinkHeader(request, NGSIConstants.HEADER_REL_LDCONTEXT); + QueryParams qp = paramsResolver.getQueryParamsFromUriQuery(request.getParameterMap(), linkHeaders); + if (qp == null) // invalid query + throw new ResponseException(ErrorType.InvalidRequest); + List csourceList = csourceDAO.query(qp); + if (csourceList.size() > 0) { + return httpUtils.generateReply(request, csourceDAO.getListAsJsonArray(csourceList)); + } else { + throw new ResponseException(ErrorType.NotFound); + } + } else { + // spec v0.9.0 section 5.10.2.4: if neither Entity types nor Attribute names are + // provided, an error of BadRequestData shall be raised + throw new ResponseException(ErrorType.BadRequestData, "You must provide at least type or attrs as parameter"); + } + } catch (ResponseException exception) { + logger.error("Exception ::", exception); + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception exception) { + logger.error("Exception ::", exception); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, exception.getLocalizedMessage()).toJsonBytes()); + } + } + + @PostMapping + public ResponseEntity registerCSource(HttpServletRequest request, + @RequestBody(required = false) String payload) { + try { + HttpUtils.doPreflightCheck(request, payload); + logger.info("payload received :: " + payload); + + this.validate(payload); + + String resolved = httpUtils.expandPayload(request, payload, AppConstants.CSOURCE_URL_ID); + + logger.info("Resolved payload::" + resolved); + CSourceRegistration csourceRegistration = DataSerializer.getCSourceRegistration(resolved); + logger.info("Csource :: " + csourceRegistration); + URI uri = csourceService.registerCSource(csourceRegistration); + + return ResponseEntity.status(HttpStatus.CREATED).header("location", AppConstants.CSOURCE_URL + uri).build(); + } catch (ResponseException exception) { + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception e) { + e.printStackTrace(); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()).toJsonBytes()); + } + } + + @GetMapping("{registrationId}") + public ResponseEntity 
getCSourceById(HttpServletRequest request, + @PathVariable("registrationId") String registrationId) { + try { + logger.debug("get CSource() ::" + registrationId); + List csourceList = new ArrayList(); + csourceList.add(DataSerializer.toJson(csourceService.getCSourceRegistrationById(registrationId))); + return httpUtils.generateReply(request, csourceDAO.getListAsJsonArray(csourceList)); + } catch (ResponseException exception) { + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception e) { + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()).toJsonBytes()); + } + } + + @PatchMapping("{registrationId}") + public ResponseEntity updateCSource(HttpServletRequest request, + @PathVariable("registrationId") String registrationId, @RequestBody String payload) { + try { + HttpUtils.doPreflightCheck(request, payload); + logger.debug("update CSource() ::" + registrationId); + String resolved = httpUtils.expandPayload(request, payload, AppConstants.CSOURCE_URL_ID); + + csourceService.updateCSourceRegistration(registrationId, resolved); + logger.debug("update CSource request completed::" + registrationId); + return ResponseEntity.noContent().build(); + } catch (ResponseException exception) { + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception e) { + e.printStackTrace(); + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()).toJsonBytes()); + } + } + + @DeleteMapping("{registrationId}") + public ResponseEntity deleteCSource(@PathVariable("registrationId") String registrationId) { + try { + logger.debug("delete CSource() ::" + registrationId); + csourceService.deleteCSourceRegistration(registrationId); + logger.debug("delete CSource() completed::" + registrationId); + return ResponseEntity.noContent().build(); + } catch (ResponseException exception) { + return ResponseEntity.status(exception.getHttpStatus()).body(new RestResponse(exception).toJsonBytes()); + } catch (Exception e) { + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR) + .body(new RestResponse(ErrorType.InternalError, e.getLocalizedMessage()).toJsonBytes()); + } + } + + private void validate(String payload) throws ResponseException { + logger.trace("validation :: started"); + if (payload == null) { + throw new ResponseException(ErrorType.UnprocessableEntity); + } + JsonNode json = null; + try { + json = objectMapper.readTree(payload); + if (json.isNull()) { + throw new ResponseException(ErrorType.UnprocessableEntity); + } +// if (json.get(NGSIConstants.QUERY_PARAMETER_ID) == null) { +// if(json.isObject()) { +// ((ObjectNode)json).set(NGSIConstants.QUERY_PARAMETER_ID, new TextNode(generateUniqueRegId(payload))); +// }else { +// throw new ResponseException(ErrorType.BadRequestData); +// } +// +// } + } catch (JsonParseException e) { + throw new ResponseException(ErrorType.BadRequestData); + } catch (IOException e) { + throw new ResponseException(ErrorType.BadRequestData); + } + logger.trace("validation :: completed"); + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistrySubscriptionController.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistrySubscriptionController.java 
new file mode 100644 index 0000000000000000000000000000000000000000..6e51ca1370b5ef3f248ec086072f2d5822b2076e --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistrySubscriptionController.java @@ -0,0 +1,172 @@ +package eu.neclab.ngsildbroker.registryhandler.controller; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.List; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.DeleteMapping; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PatchMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.RestController; + +import com.netflix.discovery.EurekaClient; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.RestResponse; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.registryhandler.service.CSourceSubscriptionService; + + +@RestController +@RequestMapping("/ngsi-ld/v1/csourceSubscriptions") +public class RegistrySubscriptionController { + + private final static Logger logger = LogManager.getLogger(RegistrySubscriptionController.class); + + @Autowired + CSourceSubscriptionService manager; + + @Autowired + @Qualifier("rmconRes") + ContextResolverBasic contextResolver; + + + @Autowired + @Qualifier("rmops") + KafkaOps kafkaOps; + + @Autowired + EurekaClient eurekaClient; + + HttpUtils httpUtils = HttpUtils.getInstance(contextResolver); + + ResponseException badRequest = new ResponseException(ErrorType.BadRequestData); + + ResponseEntity badRequestResponse = ResponseEntity.status(badRequest.getHttpStatus()) + .body(new RestResponse(badRequest).toJsonBytes()); + // @PostConstruct + // private void setupContextResolver() { + // this.contextResolver = + // ContextResolverService.getInstance(producerChannel.atContextWriteChannel(), + // kafkaOps); + // } + // public SubscriptionController(SubscriptionManagerProducerChannel prodChannel) + // { + // this.contextResolver = new + // ContextResolverService(prodChannel.atContextWriteChannel()); + // } + + + @PostMapping + public ResponseEntity subscribeRest(HttpServletRequest request, @RequestBody 
String payload) throws ResponseException { + logger.trace("subscribeRest() :: started"); + Subscription subscription; + + + List context = HttpUtils.getAtContext(request); + String resolved = contextResolver.expand(payload, context, true, AppConstants.CSOURCE_URL_ID); + + subscription = DataSerializer.getSubscription(resolved); + if (resolved == null || subscription == null) { + return badRequestResponse; + } + + try { + SubscriptionRequest subscriptionRequest = new SubscriptionRequest(subscription, context); + URI subId = manager.subscribe(subscriptionRequest); + logger.trace("subscribeRest() :: completed"); + //no absolute url only relative url + return ResponseEntity.created(new URI("/ngsi-ld/v1/csourceSubscriptions/" + subId.toString())).body(subId.toString().getBytes()); + } catch (ResponseException e) { + logger.error("Exception ::",e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } catch (URISyntaxException e) { + logger.error("Exception ::",e); + return ResponseEntity.status(HttpStatus.CONFLICT).body(subscription.getId().toString().getBytes()); + } + } + + @GetMapping + public ResponseEntity getAllSubscriptions(HttpServletRequest request, @RequestParam(required = false, name = "limit", defaultValue = "0") int limit) throws ResponseException{ + logger.trace("getAllSubscriptions() :: started"); + List result = null; + result = manager.getAllSubscriptions(limit); + logger.trace("getAllSubscriptions() :: completed"); + return httpUtils.generateReply(request, DataSerializer.toJson(result)); + } + + @GetMapping("{id}") + //(method = RequestMethod.GET, value = "/{id}") + public ResponseEntity getSubscriptions(HttpServletRequest request, @PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = true) URI id, + @RequestParam(required = false, name = "limit", defaultValue = "0") int limit) { + try { + logger.trace("call getSubscriptions() ::"); + return httpUtils.generateReply(request, DataSerializer.toJson(manager.getSubscription(id))); + + } catch (ResponseException e) { + logger.error("Exception ::",e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } + + + } + + @DeleteMapping("{id}") + @RequestMapping(method = RequestMethod.DELETE, value = "/{id}") + public ResponseEntity deleteSubscription(@PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = true) URI id) { + try { + logger.trace("call deleteSubscription() ::"); + manager.unsubscribe(id); + } catch (ResponseException e) { + logger.error("Exception ::",e); + return ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } + return ResponseEntity.noContent().build(); + } + + @PatchMapping("{id}") + public ResponseEntity updateSubscription(HttpServletRequest request, @PathVariable(name = NGSIConstants.QUERY_PARAMETER_ID, required = true) URI id, + @RequestBody String payload) throws ResponseException { + logger.trace("call updateSubscription() ::"); + List context = HttpUtils.getAtContext(request); + String resolved = contextResolver.expand(payload, context, true, AppConstants.CSOURCE_URL_ID); + Subscription subscription = DataSerializer.getSubscription(resolved); + if(subscription.getId() == null) { + subscription.setId(id); + } + if (resolved == null || subscription == null || !id.equals(subscription.getId())) { + return badRequestResponse; + } + try { + manager.updateSubscription(subscription); + } catch (ResponseException e) { + logger.error("Exception ::",e); + return 
ResponseEntity.status(e.getHttpStatus()).body(new RestResponse(e).toJsonBytes()); + } + return ResponseEntity.noContent().build(); + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..89d00cba67a71e8e3802e0f440f3a98a8c44b0e1 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.java @@ -0,0 +1,329 @@ +package eu.neclab.ngsildbroker.registryhandler.repository; + +import java.sql.SQLException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.stereotype.Repository; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.constants.NGSIConstants; +import eu.neclab.ngsildbroker.commons.datatypes.GeoqueryRel; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository("rmcsourcedao") +public class CSourceDAO extends StorageReaderDAO { + + private final static Logger logger = LogManager.getLogger(CSourceDAO.class); + + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_ID = "entity_id"; + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN = "entity_idpattern"; + protected final static String DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE = "entity_type"; + protected final static String DBCOLUMN_CSOURCE_INFO_PROPERTY_ID = "property_id"; + protected final static String DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID = "relationship_id"; + + protected final static Map NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO = initNgsildToSqlReservedPropertiesMappingGeo(); + + protected static Map initNgsildToSqlReservedPropertiesMappingGeo() { + Map map = new HashMap<>(); + map.put(NGSIConstants.NGSI_LD_LOCATION, DBConstants.DBCOLUMN_LOCATION); + return Collections.unmodifiableMap(map); + } + + protected final static Map NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING = initNgsildToPostgisGeoOperatorsMapping(); + + protected static Map initNgsildToPostgisGeoOperatorsMapping() { + Map map = new HashMap<>(); + map.put(NGSIConstants.GEO_REL_NEAR, null); + map.put(NGSIConstants.GEO_REL_WITHIN, DBConstants.POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_CONTAINS, DBConstants.POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_OVERLAPS, null); + map.put(NGSIConstants.GEO_REL_INTERSECTS, DBConstants.POSTGIS_INTERSECTS); + map.put(NGSIConstants.GEO_REL_EQUALS, DBConstants.POSTGIS_CONTAINS); + map.put(NGSIConstants.GEO_REL_DISJOINT, null); + return Collections.unmodifiableMap(map); + } + + private boolean externalCsourcesOnly = false; + + @Override + public List query(QueryParams qp) { + this.externalCsourcesOnly = false; + return super.query(qp); + } + + public List queryExternalCsources(QueryParams qp) throws SQLException { + this.externalCsourcesOnly = true; + return super.query(qp); + } + + @Override + protected String translateNgsildQueryToSql(QueryParams qp) throws ResponseException { + 
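+		// Translates the NGSI-LD discovery parameters into a SELECT on the csource table:
+		// filtering by entity type (optionally narrowed by id or idPattern), by attribute names
+		// (property/relationship ids) and by a geoquery. csourceinformation is joined only when
+		// type/attrs filtering requires it, and an empty string is returned when no supported
+		// filter is present (the 'q' filter is not implemented for csource discovery yet).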
StringBuilder fullSqlWhere = new StringBuilder(70); + String sqlWhere = ""; + boolean csourceInformationIsNeeded = false; + boolean sqlOk = false; + + if (externalCsourcesOnly) { + fullSqlWhere.append("(c.internal = false) AND "); + } + + // query by type + (id, idPattern) + if (qp.getType()!=null) { + + String typeValue = qp.getType(); + String idValue = ""; + String idPatternValue = ""; + if (qp.getId()!=null) + idValue = qp.getId(); + if (qp.getIdPattern()!=null) + idPatternValue = qp.getIdPattern(); + // id takes precedence on idPattern. clear idPattern if both are given + if (!idValue.isEmpty() && !idPatternValue.isEmpty()) + idPatternValue = ""; + + // query by type + (id, idPattern) + attrs + if (qp.getAttrs()!=null) { + String attrsValue = qp.getAttrs(); + sqlWhere = getCommonSqlWhereForTypeIdIdPattern(typeValue, idValue, idPatternValue); + sqlWhere += " AND "; + sqlWhere += getSqlWhereByAttrsInTypeFiltering(attrsValue); + + } else { // query by type + (id, idPattern) only (no attrs) + + sqlWhere = "(c.has_registrationinfo_with_attrs_only) OR "; + sqlWhere += getCommonSqlWhereForTypeIdIdPattern(typeValue, idValue, idPatternValue); + + } + fullSqlWhere.append("(" + sqlWhere + ") AND "); + csourceInformationIsNeeded = true; + sqlOk = true; + + // query by attrs only + } else if (qp.getAttrs()!=null) { + String attrsValue = qp.getAttrs(); + if (attrsValue.indexOf(",") == -1) { + sqlWhere = "ci." + DBCOLUMN_CSOURCE_INFO_PROPERTY_ID+" = '"+attrsValue+"' OR " + +"ci." + DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID+" = '"+attrsValue+"'"; + }else { + sqlWhere="ci." + DBCOLUMN_CSOURCE_INFO_PROPERTY_ID+" IN ('"+attrsValue.replace(",", "','")+"') OR " + +"ci." + DBCOLUMN_CSOURCE_INFO_RELATIONSHIP_ID+" IN ('"+attrsValue.replace(",", "','")+"')"; + } + fullSqlWhere.append("(" + sqlWhere + ") AND "); + csourceInformationIsNeeded = true; + sqlOk = true; + } + + // advanced query "q" + if (qp.getQ()!=null) { + // TODO: it's not clear in spec how this should work + logger.error("'q' filter has not been developed yet in csource discovery!"); + return ""; + } + + // geoquery + if (qp.getGeorel()!=null) { + GeoqueryRel gqr = qp.getGeorel(); + logger.debug("Georel value " + gqr.getGeorelOp()); + try { + sqlWhere = translateNgsildGeoqueryToPostgisQuery(gqr, qp.getGeometry(), qp.getCoordinates(), + qp.getGeoproperty()); + } catch (ResponseException e) { + e.printStackTrace(); + } + fullSqlWhere.append(sqlWhere + " AND "); + sqlOk = true; + } + + if (sqlOk) { + String sqlQuery = "SELECT DISTINCT c.data " + "FROM " + DBConstants.DBTABLE_CSOURCE + " c "; + if (csourceInformationIsNeeded) + sqlQuery += "INNER JOIN " + DBConstants.DBTABLE_CSOURCE_INFO + " ci ON (ci.csource_id = c.id) "; + + if (fullSqlWhere.length() > 0) { + sqlQuery += "WHERE " + fullSqlWhere.toString() + " 1=1 "; + } + // order by ? 
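+			// each fragment appended to fullSqlWhere ends with " AND ", so the trailing "1=1"
+			// keeps the generated WHERE clause syntactically valid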
+ return sqlQuery; + } else { + return ""; + } + } + + private String getCommonSqlWhereForTypeIdIdPattern(String typeValue, String idValue, String idPatternValue) { + String sqlWhere = ""; + if (idValue.isEmpty() && idPatternValue.isEmpty()) { // case 1: type only + sqlWhere += getSqlWhereByType(typeValue, false); + } else if (!idValue.isEmpty() && idPatternValue.isEmpty()) { // case 2: type+id + sqlWhere += "("; + sqlWhere += getSqlWhereByType(typeValue, true); + sqlWhere += " OR "; + sqlWhere += getSqlWhereById(typeValue, idValue); + sqlWhere += ")"; + } else if (idValue.isEmpty() && !idPatternValue.isEmpty()) { // case 3: type+idPattern + sqlWhere += "("; + sqlWhere += getSqlWhereByType(typeValue, true); + sqlWhere += " OR "; + sqlWhere += getSqlWhereByIdPattern(typeValue, idPatternValue); + sqlWhere += ")"; + } + return sqlWhere; + } + + private String getSqlWhereByType(String typeValue, boolean includeIdAndIdPatternNullTest) { + String sqlWhere = "("; + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') "; + } + if (includeIdAndIdPatternNullTest) + sqlWhere += "AND ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " IS NULL AND " + + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " IS NULL"; + sqlWhere += ")"; + return sqlWhere; + } + + private String getSqlWhereById(String typeValue, String idValue) { + String sqlWhere = "( "; + + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' AND "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') AND "; + } + + if (idValue.indexOf(",") == -1) { + sqlWhere += "(" + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " = '" + idValue + "' OR " + "'" + + idValue + "' ~ " + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + ")"; + } else { + String[] ids = idValue.split(","); + String whereId = "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " IN ( "; + String whereIdPattern = "("; + for (String id : ids) { + whereId += "'" + id + "',"; + whereIdPattern += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " ~ '" + id + + "' OR "; + } + whereId = StringUtils.chomp(whereId, ","); + whereIdPattern = StringUtils.chomp(whereIdPattern, "OR "); + whereId += ")"; + whereIdPattern += ")"; + + sqlWhere += "(" + whereId + " OR " + whereIdPattern + ")"; + } + + sqlWhere += " )"; + return sqlWhere; + } + + private String getSqlWhereByIdPattern(String typeValue, String idPatternValue) { + String sqlWhere = "( "; + if (typeValue.indexOf(",") == -1) { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " = '" + typeValue + "' AND "; + } else { + sqlWhere += "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_TYPE + " IN ('" + typeValue.replace(",", "','") + "') AND "; + } + sqlWhere += "(" + "ci." + DBCOLUMN_CSOURCE_INFO_ENTITY_ID + " ~ '" + idPatternValue + "' OR " + + "ci." 
+ DBCOLUMN_CSOURCE_INFO_ENTITY_IDPATTERN + " ~ '" + idPatternValue + "')"; + sqlWhere += " )"; + return sqlWhere; + } + + private String getSqlWhereByAttrsInTypeFiltering(String attrsValue) { + String sqlWhere; + sqlWhere = "( " + + "NOT EXISTS (SELECT 1 FROM csourceinformation ci2 " + + " WHERE ci2.group_id = ci.group_id AND " + + " (ci2.property_id IS NOT NULL OR ci2.relationship_id IS NOT NULL)) " + + "OR " + + "EXISTS (SELECT 1 FROM csourceinformation ci3 " + + " WHERE ci3.group_id = ci.group_id AND " ; + if (attrsValue.indexOf(",") == -1) { + sqlWhere += "(ci3.property_id = '" + attrsValue + "' OR " + + " ci3.relationship_id = '" + attrsValue + "') "; + } else { + sqlWhere += "(ci3.property_id IN ('" + attrsValue.replace(",", "','") + "') OR " + + " ci3.relationship_id IN ('" + attrsValue.replace(",", "','") + "') ) "; + } + sqlWhere += ") )"; + return sqlWhere; + } + + // TODO: SQL input sanitization + // TODO: property of property + // TODO: [SPEC] spec is not clear on how to define a "property of property" in + // the geoproperty field. (probably using dots, but...) + @Override + protected String translateNgsildGeoqueryToPostgisQuery(GeoqueryRel georel, String geometry, String coordinates, + String geoproperty) throws ResponseException { + if (georel.getGeorelOp().isEmpty() || geometry==null || coordinates==null || geometry.isEmpty() || coordinates.isEmpty()) { + logger.error("georel, geometry and coordinates are empty or invalid!"); + throw new ResponseException(ErrorType.BadRequestData, + "georel, geometry and coordinates are empty or invalid!"); + } + + StringBuilder sqlWhere = new StringBuilder(50); + + String georelOp = georel.getGeorelOp(); + logger.debug(" Geoquery term georelOp: " + georelOp); + + String dbColumn = NGSILD_TO_SQL_RESERVED_PROPERTIES_MAPPING_GEO.get(geoproperty); + if (dbColumn == null) { + dbColumn = "ST_SetSRID(ST_GeomFromGeoJSON( c.data#>>'{" + geoproperty + ",0," + + NGSIConstants.JSON_LD_VALUE + "}'), 4326)"; + } else { + dbColumn = "c." 
+ dbColumn; + } + + String referenceValue = "ST_SetSRID(ST_GeomFromGeoJSON('{\"type\": \"" + geometry + "\", \"coordinates\": " + + coordinates + " }'), 4326)"; + + switch (georelOp) { + case NGSIConstants.GEO_REL_WITHIN: + case NGSIConstants.GEO_REL_CONTAINS: + case NGSIConstants.GEO_REL_INTERSECTS: + case NGSIConstants.GEO_REL_EQUALS: + sqlWhere.append(NGSILD_TO_POSTGIS_GEO_OPERATORS_MAPPING.get(georelOp) + "( " + dbColumn + ", " + + referenceValue + ") "); + break; + case NGSIConstants.GEO_REL_NEAR: + if (georel.getDistanceType()!=null && georel.getDistanceValue()!=null) { + if (georel.getDistanceType().equals(NGSIConstants.GEO_REL_MIN_DISTANCE)) + sqlWhere.append("NOT " + DBConstants.POSTGIS_WITHIN + "( " + dbColumn + ", ST_Buffer(" + referenceValue + + "::geography, " + georel.getDistanceValue() + + ")::geometry ) "); + else + sqlWhere.append(DBConstants.POSTGIS_INTERSECTS + "( " + dbColumn + ", ST_Buffer(" + referenceValue + + "::geography, " + georel.getDistanceValue() + + ")::geometry ) "); + } else { + throw new ResponseException(ErrorType.BadRequestData, + "GeoQuery: Type and distance are required for near relation"); + } + break; + case NGSIConstants.GEO_REL_OVERLAPS: + sqlWhere.append("("); + sqlWhere.append(DBConstants.POSTGIS_OVERLAPS + "( " + dbColumn + ", " + referenceValue + ")"); + sqlWhere.append(" OR "); + sqlWhere.append(DBConstants.POSTGIS_CONTAINS + "( " + dbColumn + ", " + referenceValue + ")"); + sqlWhere.append(")"); + break; + case NGSIConstants.GEO_REL_DISJOINT: + sqlWhere.append("NOT " + DBConstants.POSTGIS_WITHIN + "( " + dbColumn + ", " + referenceValue + ") "); + break; + default: + throw new ResponseException(ErrorType.BadRequestData, "Invalid georel operator: " + georelOp); + } + return sqlWhere.toString(); + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceIntervalNotificationHandler.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceIntervalNotificationHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..c4b23c228b224719733367126ba4a65988e00a3a --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceIntervalNotificationHandler.java @@ -0,0 +1,67 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import java.util.HashMap; +import java.util.List; +import java.util.Timer; +import java.util.TimerTask; + +import com.google.common.collect.ArrayListMultimap; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceNotification; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.interfaces.CSourceNotificationHandler; +import eu.neclab.ngsildbroker.commons.tools.EntityTools; + +public class CSourceIntervalNotificationHandler { + + private ArrayListMultimap id2Data = ArrayListMultimap.create(); + + private HashMap id2TimerTask = new HashMap(); + private Timer executor = new Timer(true); + + + private CSourceNotificationHandler notificationHandler; + + public CSourceIntervalNotificationHandler(CSourceNotificationHandler notificationHandler) { + this.notificationHandler = notificationHandler; + } + + + public void addSub(Subscription sub, long interval) { + + TimerTask temp = new TimerTask() { + + @Override + public void run() { + List data; + synchronized (id2Data) { + data = id2Data.removeAll(sub.getId().toString()); + } + if (data != null) { + List 
notifications = EntityTools.squashCSourceNotifications(data); + for(CSourceNotification notification: notifications) { + notificationHandler.notify(notification, sub); + } + } + + } + + }; + id2TimerTask.put(sub.getId().toString(), temp); + executor.schedule(temp, 0, interval); + } + + + public void removeSub(String subId) { + id2TimerTask.get(subId).cancel(); + id2TimerTask.remove(subId); + id2Data.removeAll(subId); + } + + public void notify(CSourceNotification notification, Subscription sub) { + synchronized (id2Data) { + id2Data.put(sub.getId().toString(), notification); + } + + } +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceNotificationHandlerInternalKafka.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceNotificationHandlerInternalKafka.java new file mode 100644 index 0000000000000000000000000000000000000000..e7a5e50db4fbb71a4a6f04740486c422c6702947 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceNotificationHandlerInternalKafka.java @@ -0,0 +1,44 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import java.util.ArrayList; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceNotification; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.interfaces.CSourceNotificationHandler; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.registryhandler.config.CSourceProducerChannel; + + +public class CSourceNotificationHandlerInternalKafka implements CSourceNotificationHandler{ + + private KafkaOps kafkaOps; + private CSourceProducerChannel cSourceProducerChannel; + + + public CSourceNotificationHandlerInternalKafka(KafkaOps kafkaOps, CSourceProducerChannel cSourceProducerChannel) { + this.kafkaOps = kafkaOps; + this.cSourceProducerChannel = cSourceProducerChannel; + } + + @Override + public void notify(CSourceNotification notification, Subscription sub) { + byte[] id = sub.getId().toString().getBytes(); + ArrayList temp = new ArrayList(); + for(CSourceRegistration regInfo: notification.getData()) { + temp.add(regInfo.getEndpoint().toString()); + } + + byte[] body = DataSerializer.toJson(temp).getBytes(); + try { + this.kafkaOps.pushToKafka(cSourceProducerChannel.csourceNotificationWriteChannel(),id, body); + } catch (ResponseException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceNotificationHandlerREST.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceNotificationHandlerREST.java new file mode 100644 index 0000000000000000000000000000000000000000..24bc97275ceab342f14a60ca8f2d0032f3523dd0 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceNotificationHandlerREST.java @@ -0,0 +1,56 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import java.io.IOException; +import java.util.HashMap; + +import org.apache.logging.log4j.LogManager; +import 
org.apache.logging.log4j.Logger; +import org.springframework.stereotype.Service; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceNotification; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.interfaces.CSourceNotificationHandler; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; + +@Service +public class CSourceNotificationHandlerREST implements CSourceNotificationHandler{ + + private final static Logger logger = LogManager.getLogger(CSourceNotificationHandlerREST.class); + + + + private ContextResolverBasic contextResolver; + HttpUtils httpUtils; + + public CSourceNotificationHandlerREST(ContextResolverBasic contextResolver) { + this.contextResolver = contextResolver; + httpUtils = HttpUtils.getInstance(contextResolver); + } + + @Override + public void notify(CSourceNotification notification, Subscription sub) { + String regString = DataSerializer.toJson(notification); + //TODO rework when storage of sub context is done + //regString = contextResolver.simplify(regString, contextResolver.getContextAsSet(sub.getId().toString()), true).getSimplifiedCompletePayload(); + HashMap addHeaders = new HashMap(); + if(sub.getNotification().getEndPoint().getAccept() != null) { + addHeaders.put("accept", sub.getNotification().getEndPoint().getAccept()); + } + try { + httpUtils.doPost(sub.getNotification().getEndPoint().getUri(), regString, addHeaders); + } catch (IOException e) { + logger.error("Failed to send notification to endpoint " + sub.getNotification().getEndPoint().getUri()); + } + + + } + + + + + + + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.java new file mode 100644 index 0000000000000000000000000000000000000000..8e46e3dee0a4f473aea8ee2d6d50138adce52f93 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.java @@ -0,0 +1,364 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Timer; +import java.util.TimerTask; +import java.util.concurrent.ArrayBlockingQueue; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.messaging.Message; +import org.springframework.messaging.MessageChannel; +import org.springframework.messaging.handler.annotation.Payload; +import org.springframework.messaging.handler.annotation.SendTo; +import org.springframework.stereotype.Service; + +import com.fasterxml.jackson.databind.JsonNode; 
+import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.enums.TriggerReason; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.ngsiqueries.QueryParser; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.registryhandler.config.CSourceProducerChannel; +import eu.neclab.ngsildbroker.registryhandler.config.StartupConfig; +import eu.neclab.ngsildbroker.registryhandler.controller.RegistryController; +import eu.neclab.ngsildbroker.registryhandler.repository.CSourceDAO; + +@Service +public class CSourceService { + + private final static Logger logger = LoggerFactory.getLogger(RegistryController.class); + // public static final Gson GSON = DataSerializer.GSON; + + @Value("${bootstrap.servers}") + String BOOTSTRAP_SERVERS; + + @Autowired + @Qualifier("rmops") + KafkaOps operations; + @Autowired + ObjectMapper objectMapper; + + @Autowired + StartupConfig startupConfig; + + @Autowired + @Qualifier("rmcsourcedao") + CSourceDAO csourceDAO; + + @Autowired + CSourceSubscriptionService csourceSubService; + + @Autowired + @Qualifier("rmqueryParser") + QueryParser queryParser; + + @Value("${csource.source.topic}") + String CSOURCE_TOPIC; + + private final CSourceProducerChannel producerChannels; + + HashMap regId2TimerTask = new HashMap(); + Timer watchDog = new Timer(true); + private ArrayBlockingQueue workQueue = new ArrayBlockingQueue(50000, true); + + ThreadPoolExecutor executor = new ThreadPoolExecutor(20, 50, 600000, TimeUnit.MILLISECONDS, workQueue); + CSourceService(CSourceProducerChannel producerChannels) { + + this.producerChannels = producerChannels; + } + + public List getCSourceRegistrations() throws ResponseException, IOException, Exception { + logger.trace("getAll() ::"); + Map records = operations.pullFromKafka(this.CSOURCE_TOPIC); + Map entityMap = new HashMap(); + JsonNode entityJsonBody = objectMapper.createObjectNode(); + byte[] result = null; + String key = null; + + for (String recordKey : records.keySet()) { + result = records.get(recordKey); + key = recordKey; + entityJsonBody = objectMapper.readTree(result); + if (!entityJsonBody.isNull()) + entityMap.put(key, entityJsonBody); + } + return new ArrayList(entityMap.values()); + } + + // private List getParamsList(String types) { + // if (types != null) { + // return Stream.of(types.split(",")).collect(Collectors.toList()); + // } + // return null; + // } + // + // private boolean filterForParams(List params, String matchEntity) { + // if (params == null) + // return true; + // if (!params.contains(matchEntity)) { + // return false; + // } + // return true; + // } + // + // private boolean filterForParamsPatterns(List idPatterns, String + // matchEntity) { + // if (idPatterns == null) + // return true; + // for (String idPattern : idPatterns) { + // if (Pattern.compile(idPattern).matcher(matchEntity).matches()) { + // return true; + // } + // } + // return false; + // } + + public CSourceRegistration getCSourceRegistrationById(String registrationId) throws ResponseException, Exception { + if (registrationId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + byte[] csourceBytes = operations.getMessage(registrationId, 
this.CSOURCE_TOPIC); + if (csourceBytes == null) { + throw new ResponseException(ErrorType.NotFound); + } + JsonNode entityJsonBody = objectMapper.createObjectNode(); + entityJsonBody = objectMapper.readTree(csourceBytes); + if (entityJsonBody.isNull()) { + throw new ResponseException(ErrorType.NotFound); + } + return DataSerializer.getCSourceRegistration(objectMapper.writeValueAsString(entityJsonBody)); + } + + public boolean updateCSourceRegistration(String registrationId, String payload) + throws ResponseException, Exception { + MessageChannel messageChannel = producerChannels.csourceWriteChannel(); + if (registrationId == null) { + throw new ResponseException(ErrorType.BadRequestData); + } + byte[] csourceBytes = operations.getMessage(registrationId, this.CSOURCE_TOPIC); + if (csourceBytes == null) { + throw new ResponseException(ErrorType.NotFound); + } + // original message in kafka. + JsonNode entityJsonBody = objectMapper.createObjectNode(); + entityJsonBody = objectMapper.readTree(csourceBytes); + CSourceRegistration prevCSourceRegistration = DataSerializer.getCSourceRegistration(entityJsonBody.toString()); + logger.debug("Previous CSource Registration:: " + prevCSourceRegistration); + + CSourceRegistration updateCS = DataSerializer.getCSourceRegistration(payload); + + CSourceRegistration newCSourceRegistration = prevCSourceRegistration.update(updateCS); + + synchronized (this) { + TimerTask task = regId2TimerTask.get(registrationId.toString()); + if (task != null) { + task.cancel(); + } + this.csourceTimerTask(newCSourceRegistration); + } + csourceSubService.checkSubscriptions(prevCSourceRegistration, newCSourceRegistration); + this.operations.pushToKafka(messageChannel, registrationId.getBytes(), + DataSerializer.toJson(newCSourceRegistration).getBytes()); + handleFed(); + return true; + } + + private void handleFed() { + new Thread() { + public void run() { + startupConfig.handleUpdatedTypesForFed(); + }; + }.start(); + } + + public URI registerCSource(CSourceRegistration csourceRegistration) throws ResponseException, Exception { + MessageChannel messageChannel = producerChannels.csourceWriteChannel(); + String id; + URI idUri = csourceRegistration.getId(); + if (idUri == null) { + idUri = generateUniqueRegId(csourceRegistration); + csourceRegistration.setId(idUri); + + } + id = idUri.toString(); + + if (csourceRegistration.getType() == null) { + logger.error("Invalid type!"); + throw new ResponseException(ErrorType.BadRequestData); + } + if (!isValidURL(csourceRegistration.getEndpoint().toString())) { + logger.error("Invalid endpoint URL!"); + throw new ResponseException(ErrorType.BadRequestData); + } + if (csourceRegistration.getInformation() == null) { + logger.error("Information is empty!"); + throw new ResponseException(ErrorType.BadRequestData); + } + if (csourceRegistration.getExpires() != null && !isValidFutureDate(csourceRegistration.getExpires())) { + logger.error("Invalid expire date!"); + throw new ResponseException(ErrorType.BadRequestData); + } +//TODO replace this with a database only attempt + if (this.operations.isMessageExists(id, this.CSOURCE_TOPIC)) { + byte[] messageBytes = this.operations.getMessage(id, this.CSOURCE_TOPIC); + JsonNode messgeJson = objectMapper.createObjectNode(); + messgeJson = objectMapper.readTree(messageBytes); + if (!messgeJson.isNull()) { + throw new ResponseException(ErrorType.AlreadyExists); + } + } + + // TODO: [check for valid identifier (id)] + operations.pushToKafka(messageChannel, id.getBytes(), 
			DataSerializer.toJson(csourceRegistration).getBytes());
+
+		this.csourceTimerTask(csourceRegistration);
+		if (!csourceRegistration.isInternal()) {
+			csourceSubService.checkSubscriptions(csourceRegistration, TriggerReason.newlyMatching);
+		}
+		handleFed();
+		return idUri;
+	}
+
+	private URI generateUniqueRegId(CSourceRegistration csourceRegistration) {
+
+		try {
+
+			String key = "urn:ngsi-ld:csourceregistration:" + csourceRegistration.hashCode();
+			while (this.operations.isMessageExists(key, this.CSOURCE_TOPIC)) {
+				key = key + "1";
+			}
+			return new URI(key);
+		} catch (URISyntaxException e) {
+			// Left empty intentionally should never happen
+			throw new AssertionError();
+		}
+	}
+
+	public void csourceTimerTask(CSourceRegistration csourceReg) {
+		if (csourceReg.getExpires() != null) {
+			TimerTask cancel = new TimerTask() {
+				@Override
+				public void run() {
+					try {
+						synchronized (this) {
+							deleteCSourceRegistration(csourceReg.getId().toString());
+						}
+					} catch (Exception e) {
+						logger.error("Timer Task -> Exception while expiring registration :: ", e);
+					}
+				}
+			};
+			regId2TimerTask.put(csourceReg.getId().toString(), cancel);
+			watchDog.schedule(cancel, csourceReg.getExpires() - System.currentTimeMillis());
+		}
+	}
+
+	public boolean deleteCSourceRegistration(String registrationId) throws ResponseException, Exception {
+		MessageChannel messageChannel = producerChannels.csourceWriteChannel();
+		if (registrationId == null) {
+			throw new ResponseException(ErrorType.BadRequestData);
+		}
+		byte[] originalJson = this.operations.getMessage(registrationId, this.CSOURCE_TOPIC);
+		if (originalJson == null)
+			throw new ResponseException(ErrorType.NotFound);
+		CSourceRegistration csourceRegistration = objectMapper.readValue(originalJson, CSourceRegistration.class);
+		this.csourceSubService.checkSubscriptions(csourceRegistration, TriggerReason.noLongerMatching);
+		this.operations.pushToKafka(messageChannel, registrationId.getBytes(), "null".getBytes());
+		handleFed();
+		return true;
+		// TODO: [push to other DELETE TOPIC]
+	}
+
+	// for testing
+	// @StreamListener(CSourceConsumerChannel.csourceReadChannel)
+//	@KafkaListener(topics = "${csource.source.topic}", groupId = "regmanger")
+//	public void handleEntityCreate(Message message) {
+//		String payload = new String((byte[]) message.getPayload());
+//		String key = operations.getMessageKey(message);
+//		logger.debug("key received ::::: " + key);
+//		logger.debug("Received message: {} :::: " + payload);
+//	}
+
+	@KafkaListener(topics = "${csource.registry.topic}", groupId = "regmanger") // (CSourceConsumerChannel.contextRegistryReadChannel)
+	public void handleEntityRegistration(Message message) {
+		executor.execute(new Thread() {
+			@Override
+			public void run() {
+
+				CSourceRegistration csourceRegistration = DataSerializer
+						.getCSourceRegistration(new String((byte[]) message.getPayload()));
+				// objectMapper.readValue((byte[]) message.getPayload(),
+				// CSourceRegistration.class);
+				csourceRegistration.setInternal(true);
+				try {
+					registerCSource(csourceRegistration);
+				} catch (Exception e) {
+					logger.trace("Failed to register csource " + csourceRegistration.getId().toString(), e);
+				}
+			}
+		});
+	}
+
+	private static boolean isValidURL(String urlString) {
+		URL url;
+		try {
+			url = new URL(urlString);
+			url.toURI();
+			return true;
+		} catch (Exception e) {
+			// TODO add logging; a malformed URL simply falls through to return false
+		}
+		return false;
+	}
+
+	// return true for future date validation
+	private boolean isValidFutureDate(Long date) {
+
+		return System.currentTimeMillis() < date;
+	}
+
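+	// Answers context-source discovery queries from the csource query topic: deserializes the
+	// QueryParams payload, asks the DAO for matching external csource registrations and
+	// returns them as a JSON array on the @SendTo reply channel.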
@KafkaListener(topics = "${csource.query.topic}", groupId = "csourceQueryHandler") + @SendTo + // @SendTo("QUERY_RESULT") // for tests without QueryManager + public byte[] handleContextQuery(@Payload byte[] message) throws Exception { + logger.trace("handleContextQuery() :: started"); + String payload = new String((byte[]) message); + logger.debug("Received message: " + payload); + String resultPayload = ""; + try { + QueryParams qp = DataSerializer.getQueryParams(payload); + List csourceList = csourceDAO.queryExternalCsources(qp); + resultPayload = csourceDAO.getListAsJsonArray(csourceList); + } catch (Exception e) { + e.printStackTrace(); + } + logger.trace("Pushing result to Kafka... "); + logger.trace("handleContextQuery() :: completed"); + return resultPayload.getBytes(); + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceSubscriptionService.java b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceSubscriptionService.java new file mode 100644 index 0000000000000000000000000000000000000000..49ad5855fad568612b23c62d342d1ba06b07979b --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceSubscriptionService.java @@ -0,0 +1,822 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_CONTAINS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_DISJOINT; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_EQUALS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_INTERSECTS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_NEAR; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_OVERLAPS; +import static eu.neclab.ngsildbroker.commons.constants.NGSIConstants.GEO_REL_WITHIN; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import javax.annotation.PostConstruct; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.bouncycastle.util.Arrays; +import org.locationtech.spatial4j.SpatialPredicate; +import org.locationtech.spatial4j.context.jts.JtsSpatialContext; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeFactory.PolygonBuilder; +import org.locationtech.spatial4j.shape.jts.JtsShapeFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.messaging.Message; +import org.springframework.stereotype.Service; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.filosganga.geogson.model.Geometry; +import com.github.filosganga.geogson.model.Point; +import com.github.filosganga.geogson.model.Polygon; +import com.github.filosganga.geogson.model.positions.SinglePosition; +import 
com.google.common.collect.ArrayListMultimap; +import com.google.gson.JsonParseException; +import com.netflix.discovery.EurekaClient; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.commons.constants.KafkaConstants; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceNotification; +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.datatypes.EntityInfo; +import eu.neclab.ngsildbroker.commons.datatypes.GeoRelation; +import eu.neclab.ngsildbroker.commons.datatypes.Information; +import eu.neclab.ngsildbroker.commons.datatypes.LDGeoQuery; +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.enums.ErrorType; +import eu.neclab.ngsildbroker.commons.enums.TriggerReason; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.interfaces.CSourceNotificationHandler; +import eu.neclab.ngsildbroker.commons.ldcontext.ContextResolverBasic; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.commons.tools.EntityTools; +import eu.neclab.ngsildbroker.registryhandler.config.CSourceProducerChannel; + +@Service +public class CSourceSubscriptionService { + private final static Logger logger = LogManager.getLogger(CSourceSubscriptionService.class); + + private final byte[] nullArray = "null".getBytes(); + + @Autowired + @Qualifier("rmops") + KafkaOps kafkaOps; + + @Autowired + ObjectMapper objectMapper; + + @Autowired + @Qualifier("rmconRes") + ContextResolverBasic contextResolverService; + + @Autowired + EurekaClient eurekaClient; + + @Autowired + CSourceService cSourceService; + + CSourceNotificationHandler notificationHandler; + CSourceNotificationHandler internalNotificationHandler; + + private final CSourceProducerChannel producerChannel; + + JtsShapeFactory shapeFactory = JtsSpatialContext.GEO.getShapeFactory(); + + HashMap subscriptionId2Subscription = new HashMap(); + ArrayListMultimap idBasedSubscriptions = ArrayListMultimap.create(); + ArrayListMultimap typeBasedSubscriptions = ArrayListMultimap.create(); + ArrayListMultimap idPatternBasedSubscriptions = ArrayListMultimap.create(); + HashMap remoteNotifyCallbackId2InternalSub = new HashMap(); + // HashMap subId2HashNotificationData = new HashMap(); + @Value("${bootstrap.servers}") + String BOOTSTRAP_SERVERS; + + // @Value("${notification.port}") + // String REMOTE_NOTIFICATION_PORT; + + /* + * Map props = new HashMap(); private + * SubscriptionManagerProducerChannel producerChannel; { // Make configurable + * props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, BOOTSTRAP_SERVERS); + * props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + * ByteArrayDeserializer.class); + * props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + * ByteArrayDeserializer.class); props.put(ConsumerConfig.GROUP_ID_CONFIG, + * UUID.randomUUID().toString()); props.put(JsonDeserializer.TRUSTED_PACKAGES, + * "*"); } + */ + + public CSourceSubscriptionService(CSourceProducerChannel producerChannel) { + this.producerChannel = producerChannel; + // loadStoredSubscriptions(); + } + + @PostConstruct + private void loadStoredSubscriptions() { + // this.contextResolver = + // ContextResolverService.getInstance(producerChannel.atContextWriteChannel(), + // kafkaOps); + this.notificationHandler = 
new CSourceNotificationHandlerREST(contextResolverService); + this.internalNotificationHandler = new CSourceNotificationHandlerInternalKafka(kafkaOps, producerChannel); + logger.trace("call loadStoredSubscriptions() ::"); + Map subs = kafkaOps.pullFromKafka(KafkaConstants.CSOURCE_SUBSCRIPTIONS_TOPIC); + for (byte[] sub : subs.values()) { + try { + if (Arrays.areEqual(sub, nullArray)) { + continue; + } + SubscriptionRequest subscriptionRequest = DataSerializer.getSubscriptionRequest(new String(sub)); + subscribe(subscriptionRequest, false); + } catch (JsonParseException e) { + logger.error("Exception ::", e); + e.printStackTrace(); + continue; + } catch (ResponseException e) { + logger.error("Exception ::", e); + e.printStackTrace(); + continue; + } + } + subs = kafkaOps.pullFromKafka(KafkaConstants.SUBSCRIPTIONS_TOPIC); + for (byte[] sub : subs.values()) { + try { + if (Arrays.areEqual(sub, nullArray)) { + continue; + } + SubscriptionRequest subscriptionRequest = DataSerializer.getSubscriptionRequest(new String(sub)); + subscriptionRequest.getSubscription().setInternal(true); + subscribe(subscriptionRequest, false); + } catch (JsonParseException e) { + // logger.error("Exception ::", e); + // e.printStackTrace(); + continue; + } catch (ResponseException e) { + // logger.error("Exception ::", e); + // e.printStackTrace(); + continue; + } + } + + } + + public Subscription querySubscription(URI id) throws ResponseException { + if (subscriptionId2Subscription.containsKey(id)) { + return subscriptionId2Subscription.get(id); + } else { + throw new ResponseException(ErrorType.NotFound); + } + + } + + public URI subscribe(SubscriptionRequest subscriptionRequest) throws ResponseException { + return subscribe(subscriptionRequest, true); + } + + public URI subscribe(SubscriptionRequest subscriptionRequest, boolean sync) throws ResponseException { + Subscription subscription = subscriptionRequest.getSubscription(); + if (subscription.getId() == null) { + subscription.setId(generateUniqueSubId(subscription)); + } else { + if (this.subscriptionId2Subscription.containsKey(subscription.getId())) { + throw new ResponseException(ErrorType.AlreadyExists); + } + } + this.subscriptionId2Subscription.put(subscription.getId(), subscription); + for (EntityInfo info : subscription.getEntities()) { + if (info.getId() != null) { + idBasedSubscriptions.put(info.getId().toString(), subscription); + } else if (info.getType() != null) { + typeBasedSubscriptions.put(info.getType(), subscription); + } else if (info.getIdPattern() != null) { + idPatternBasedSubscriptions.put(info.getIdPattern(), subscription); + } + + } + if (sync) { + syncToMessageBus(subscriptionRequest); + } + if (subscription.isInternal()) { + new Thread() { + public void run() { + generateInitialNotification(subscription); + } + }.start(); + } + return subscription.getId(); + } + + private void generateInitialNotification(Subscription subscription) { + List registrations; + + try { + registrations = cSourceService.getCSourceRegistrations(); + + for (JsonNode reg : registrations) { + CSourceRegistration regEntry = DataSerializer + .getCSourceRegistration(objectMapper.writeValueAsString(reg)); + if (!regEntry.isInternal()) { + CSourceNotification notifyEntry = generateNotificationEntry(regEntry, subscription, + TriggerReason.newlyMatching); + if (notifyEntry != null) { + internalNotificationHandler.notify(notifyEntry, subscription); + } + } + } + } catch (Exception e) { + logger.error("Failed to get initial notification from registry"); + 
logger.error(e); + } + + } + + private void syncToMessageBus(SubscriptionRequest subscriptionRequest) throws ResponseException { + String id = subscriptionRequest.getSubscription().getId().toString(); + if (!this.kafkaOps.isMessageExists(id, KafkaConstants.CSOURCE_SUBSCRIPTIONS_TOPIC)) { + this.kafkaOps.pushToKafka(producerChannel.csourceSubscriptionWriteChannel(), id.getBytes(), + DataSerializer.toJson(subscriptionRequest).getBytes()); + } + } + + private URI generateUniqueSubId(Subscription subscription) { + + try { + return new URI("urn:ngsi-ld:CSourceSubscription:" + subscription.hashCode()); + } catch (URISyntaxException e) { + logger.error("Exception ::", e); + // Left empty intentionally should never happen + throw new AssertionError(); + } + } + + public boolean unsubscribe(URI id) throws ResponseException { + Subscription removedSub = this.subscriptionId2Subscription.remove(id); + if (removedSub == null) { + throw new ResponseException(ErrorType.NotFound); + } + for (EntityInfo info : removedSub.getEntities()) { + if (info.getId() != null) { + idBasedSubscriptions.remove(info.getId().toString(), removedSub); + } else if (info.getType() != null) { + typeBasedSubscriptions.remove(info.getType(), removedSub); + } else if (info.getIdPattern() != null) { + idPatternBasedSubscriptions.remove(info.getIdPattern(), removedSub); + } + } + + this.kafkaOps.pushToKafka(this.producerChannel.csourceSubscriptionWriteChannel(), id.toString().getBytes(), + "null".getBytes()); + return true; + } + + public Subscription updateSubscription(Subscription subscription) throws ResponseException { + Subscription oldSub = subscriptionId2Subscription.get(subscription.getId()); + if (oldSub == null) { + throw new ResponseException(ErrorType.NotFound); + } + if (subscription.getAttributeNames() != null) { + oldSub.setAttributeNames(subscription.getAttributeNames()); + } + if (subscription.getDescription() != null) { + oldSub.setDescription(subscription.getDescription()); + } + if (subscription.getEntities() != null && !subscription.getEntities().isEmpty()) { + oldSub.setEntities(subscription.getEntities()); + } + if (subscription.getExpires() != null) { + oldSub.setExpires(subscription.getExpires()); + } + if (subscription.getLdGeoQuery() != null) { + oldSub.setLdGeoQuery(subscription.getLdGeoQuery()); + } + if (subscription.getLdQuery() != null) { + oldSub.setLdQuery(subscription.getLdQuery()); + } + if (subscription.getLdTempQuery() != null) { + oldSub.setLdTempQuery(subscription.getLdTempQuery()); + } + if (subscription.getNotification() != null) { + oldSub.setNotification(subscription.getNotification()); + } + if (subscription.getThrottling() != 0) { + oldSub.setThrottling(subscription.getThrottling()); + } + if (subscription.getTimeInterval() != 0) { + oldSub.setTimeInterval(subscription.getTimeInterval()); + } + return oldSub; + } + + public List getAllSubscriptions(int limit) { + List result = new ArrayList(); + for (Subscription sub : subscriptionId2Subscription.values()) { + if (!sub.isInternal()) { + result.add(sub); + } + } + if (limit > 0) { + if (limit < result.size()) { + result = result.subList(0, limit); + } + } + return result; + } + + public Subscription getSubscription(URI subscriptionId) throws ResponseException { + if (subscriptionId2Subscription.containsKey(subscriptionId)) { + return subscriptionId2Subscription.get(subscriptionId); + } else { + throw new ResponseException(ErrorType.NotFound); + } + + } + + public void checkSubscriptions(CSourceRegistration cSourceRegistration, 
TriggerReason triggerReason) { + new Thread() { + @Override + public void run() { + HashSet subsToCheck = new HashSet(); + for (Information info : cSourceRegistration.getInformation()) { + for (EntityInfo entityInfo : info.getEntities()) { + + if (entityInfo.getId() != null) { + subsToCheck.addAll(idBasedSubscriptions.get(entityInfo.getId().toString())); + } + subsToCheck.addAll(typeBasedSubscriptions.get(entityInfo.getType())); + if (entityInfo.getId() != null) { + subsToCheck.addAll(getPatternBasedSubs(entityInfo.getId().toString())); + } + if (entityInfo.getIdPattern() != null) { + subsToCheck.addAll(getSubsForIdPattern(entityInfo.getIdPattern())); + } + + } + + } + for (Subscription sub : subsToCheck) { + CSourceNotification notifyEntry = generateNotificationEntry(cSourceRegistration, sub, + triggerReason); + if (notifyEntry != null) { + new Thread() { + @Override + public void run() { + if (sub.isInternal()) { + internalNotificationHandler.notify(notifyEntry, sub); + } else { + notificationHandler.notify(notifyEntry, sub); + } + + } + }.start(); + } + + } + + } + }.start(); + + } + + private Collection getSubsForIdPattern(String idPattern) { + ArrayList result = new ArrayList(); + for (String pattern : idPatternBasedSubscriptions.keySet()) { + if (idPattern.matches(pattern)) { + result.addAll(idPatternBasedSubscriptions.get(pattern)); + } + } + + for (String id : idBasedSubscriptions.keySet()) { + if (id.matches(idPattern)) { + result.addAll(idBasedSubscriptions.get(id)); + } + } + return result; + } + + private CSourceNotification generateNotificationEntry(CSourceRegistration regEntry, Subscription subscription, + TriggerReason triggerReason) { + if (subscription.getLdGeoQuery() != null && regEntry.getLocation() != null) { + if (subscription.isInternal()) { + if (!evaluateRegGeoQuery(subscription.getLdGeoQuery(), regEntry.getLocation())) { + return null; + } + } else { + if (!evaluateGeoQuery(subscription.getLdGeoQuery(), regEntry.getLocation())) { + return null; + } + } + } + + CSourceRegistration reg = new CSourceRegistration(); + reg.setDescription(regEntry.getDescription()); + reg.setName(regEntry.getName()); + reg.setEndpoint(regEntry.getEndpoint()); + reg.setExpires(regEntry.getExpires()); + reg.setId(regEntry.getId()); + reg.setLocation(regEntry.getLocation()); + reg.setTimestamp(regEntry.getTimestamp()); + reg.setType(regEntry.getType()); + ArrayList temp = new ArrayList(); + for (Information info : regEntry.getInformation()) { + + Information newInfo = new Information(); + temp.add(newInfo); + ArrayList newEntityInfos = new ArrayList<>(); + newInfo.setEntities(newEntityInfos); + for (EntityInfo regEntityInfo : info.getEntities()) { + for (EntityInfo subEntityInfo : subscription.getEntities()) { + if (!subEntityInfo.getType().equals(regEntityInfo.getType())) { + continue; + } + if (subEntityInfo.getId() != null) { + // id match + if ((regEntityInfo.getId() != null && subEntityInfo.getId().equals(regEntityInfo.getId())) + || (regEntityInfo.getIdPattern() != null + && subEntityInfo.getId().toString().matches(regEntityInfo.getIdPattern()))) { + addAttribMatch(info, regEntityInfo, subscription, newEntityInfos, newInfo); + } + + } else if (subEntityInfo.getIdPattern() != null && ((regEntityInfo.getId() != null + && regEntityInfo.getId().toString().matches(subEntityInfo.getIdPattern())) + || (regEntityInfo.getIdPattern() != null + && regEntityInfo.getIdPattern().matches(subEntityInfo.getIdPattern())))) { + // regex match + addAttribMatch(info, regEntityInfo, subscription, 
							newEntityInfos, newInfo);
+				} else {
+					// type match
+					addAttribMatch(info, regEntityInfo, subscription, newEntityInfos, newInfo);
+				}
+			}
+
+			}
+		}
+		reg.setInformation(temp);
+		ArrayList data = new ArrayList();
+		data.add(reg);
+		try {
+			return new CSourceNotification(EntityTools.getRandomID("csourcenotify"), subscription.getId(),
+					new Date(System.currentTimeMillis()), triggerReason, data, null, null, -1, true);
+		} catch (URISyntaxException e) {
+			// Left empty intentionally should never happen
+			throw new AssertionError();
+		}
+	}
+
+	private void addAttribMatch(Information info, EntityInfo regEntityInfo, Subscription subscription,
+			ArrayList newEntityInfos, Information newInfo) {
+		Set props = extractProperties(info, subscription);
+		Set relations = extractRelationShips(info, subscription);
+		if (props != null || relations != null) {
+			newEntityInfos.add(regEntityInfo);
+			if (props != null) {
+				newInfo.getProperties().addAll(props);
+			}
+			if (relations != null) {
+				newInfo.getRelationships().addAll(relations);
+			}
+		}
+
+	}
+
+	private Set extractRelationShips(Information info, Subscription subscription) {
+		if (info.getRelationships() == null || info.getRelationships().isEmpty()
+				|| subscription.getAttributeNames() == null || subscription.getAttributeNames().isEmpty()) {
+			return new HashSet();
+		}
+		HashSet result = new HashSet();
+		HashSet attribNames = new HashSet();
+		attribNames.addAll(subscription.getNotification().getAttributeNames());
+		attribNames.addAll(subscription.getAttributeNames());
+		for (String relationship : info.getRelationships()) {
+			if (attribNames.contains(relationship)) {
+				result.add(relationship);
+			}
+		}
+		if (result.isEmpty()) {
+			return null;
+		}
+		return result;
+	}
+
+	private Set extractProperties(Information info, Subscription subscription) {
+		if (info.getProperties() == null || info.getProperties().isEmpty() || subscription.getAttributeNames() == null
+				|| subscription.getAttributeNames().isEmpty()) {
+			return new HashSet();
+		}
+		HashSet attribNames = new HashSet();
+		attribNames.addAll(subscription.getNotification().getAttributeNames());
+		attribNames.addAll(subscription.getAttributeNames());
+		HashSet result = new HashSet();
+		for (String property : info.getProperties()) {
+			if (attribNames.contains(property)) {
+				result.add(property);
+			}
+		}
+		if (result.isEmpty()) {
+			return null;
+		}
+		return result;
+	}
+
+	private boolean evaluateGeoQuery(LDGeoQuery geoQuery, Geometry location) {
+		return evaluateGeoQuery(geoQuery, location, -1);
+	}
+
+	private boolean evaluateGeoQuery(LDGeoQuery geoQuery, Geometry location, double expandArea) {
+
+		if (geoQuery == null) {
+			return true;
+		}
+
+		String relation = geoQuery.getGeoRelation().getRelation();
+		List coordinates = geoQuery.getCoordinates();
+
+		if (location == null) {
+			return false;
+		}
+
+		if (GEO_REL_EQUALS.equals(relation)) {
+			if (location instanceof Point) {
+				List geoValueAsList = java.util.Arrays.asList(((Point) location).lon(),
+						((Point) location).lat());
+				return geoValueAsList.equals(geoQuery.getCoordinates());
+			} else {
+
+				return false;
+			}
+		} else {
+			Shape entityShape;
+			if (location instanceof Point) {
+				entityShape = shapeFactory.pointXY(((Point) location).lon(), ((Point) location).lat());
+			} else if (location instanceof Polygon) {
+				PolygonBuilder polygonBuilder = shapeFactory.polygon();
+				Iterator it = ((Polygon) location).positions().children().iterator().next().children()
+						.iterator();
+				while (it.hasNext()) {
+					polygonBuilder.pointXY(((SinglePosition) 
it).coordinates().getLon(), + ((SinglePosition) it).coordinates().getLat()); + } + entityShape = polygonBuilder.build(); + } else { + logger.error("Unsupported GeoJson type. Currently Point and Polygon are supported."); + return false; + } + Shape queryShape; + switch (geoQuery.getGeometry()) { + case Point: { + queryShape = shapeFactory.pointXY(coordinates.get(0), coordinates.get(1)); + break; + } + case Polygon: { + PolygonBuilder polygonBuilder = shapeFactory.polygon(); + for (int i = 0; i < coordinates.size(); i = i + 2) { + polygonBuilder.pointXY(coordinates.get(i), coordinates.get(i + 1)); + } + + queryShape = polygonBuilder.build(); + break; + } + default: { + return false; + } + } + if (GEO_REL_CONTAINS.equals(relation)) { + return SpatialPredicate.Contains.evaluate(entityShape, queryShape); + } else if (GEO_REL_DISJOINT.equals(relation)) { + return SpatialPredicate.IsDisjointTo.evaluate(entityShape, queryShape); + } else if (GEO_REL_INTERSECTS.equals(relation)) { + if (expandArea != -1) { + queryShape = queryShape.getBuffered(expandArea, queryShape.getContext()); + } + return SpatialPredicate.Intersects.evaluate(entityShape, queryShape); + } else if (GEO_REL_NEAR.equals(relation)) { + Shape bufferedShape = queryShape.getBuffered(geoQuery.getGeoRelation().getMaxDistanceAsDouble(), + queryShape.getContext()); + if (geoQuery.getGeoRelation().getMaxDistance() != null) { + return SpatialPredicate.IsWithin.evaluate(entityShape, bufferedShape); + } else if (geoQuery.getGeoRelation().getMinDistance() != null) { + return !SpatialPredicate.IsWithin.evaluate(entityShape, bufferedShape); + } else { + return false; + } + + } else if (GEO_REL_OVERLAPS.equals(relation)) { + return SpatialPredicate.Overlaps.evaluate(entityShape, queryShape); + } else if (GEO_REL_WITHIN.equals(relation)) { + if (expandArea != -1) { + queryShape = queryShape.getBuffered(expandArea, queryShape.getContext()); + } + return SpatialPredicate.IsWithin.evaluate(entityShape, queryShape); + } else { + return false; + } + } + + } + + private Collection getPatternBasedSubs(String key) { + ArrayList result = new ArrayList(); + for (String pattern : idPatternBasedSubscriptions.keySet()) { + if (key.matches(pattern)) { + result.addAll(idPatternBasedSubscriptions.get(pattern)); + } + } + return result; + } + + @KafkaListener(topics = "${submanager.subscription.topic}", groupId = "csourcemanager") + public void handleInternalSub(Message message) { + if (Arrays.areEqual(AppConstants.NULL_BYTES, message.getPayload())) { + try { + unsubscribe(new URI(kafkaOps.getMessageKey(message))); + } catch (ResponseException e) { + logger.error(e); + } catch (URISyntaxException e) { + logger.error(e); + } + } else { + SubscriptionRequest internalSub = DataSerializer.getSubscriptionRequest(new String(message.getPayload())); + internalSub.getSubscription().setInternal(true); + try { + subscribe(internalSub); + } catch (ResponseException e) { + logger.error(e); + } + } + } + + private boolean evaluateRegGeoQuery(LDGeoQuery subGeoQuery, Geometry geoValue) { + + LDGeoQuery regGeoQuery = new LDGeoQuery(); + regGeoQuery.setCoordinates(subGeoQuery.getCoordinates()); + regGeoQuery.setGeometry(subGeoQuery.getGeometry()); + regGeoQuery.setGeoProperty(subGeoQuery.getGeoProperty()); + GeoRelation newRel = new GeoRelation(); + regGeoQuery.setGeoRelation(newRel); + GeoRelation origRel = subGeoQuery.getGeoRelation(); + String origRelString = origRel.getRelation(); + if (origRelString.equals(GEO_REL_NEAR)) { + if (origRel.getMinDistance() != null) { + 
				newRel.setRelation(GEO_REL_WITHIN);
+				return !evaluateGeoQuery(regGeoQuery, geoValue, origRel.getMinDistanceAsDouble());
+			} else if (origRel.getMaxDistance() != null) {
+				newRel.setRelation(GEO_REL_INTERSECTS);
+				return evaluateGeoQuery(regGeoQuery, geoValue, origRel.getMaxDistanceAsDouble());
+			}
+		} else if (origRelString.equals(GEO_REL_CONTAINS)) {
+			newRel.setRelation(origRelString);
+			return evaluateGeoQuery(regGeoQuery, geoValue);
+		} else if (origRelString.equals(GEO_REL_WITHIN)) {
+			newRel.setRelation(GEO_REL_CONTAINS);
+			return evaluateGeoQuery(regGeoQuery, geoValue);
+		} else if (origRelString.equals(GEO_REL_INTERSECTS)) {
+			newRel.setRelation(origRelString);
+			return evaluateGeoQuery(regGeoQuery, geoValue);
+		} else if (origRelString.equals(GEO_REL_EQUALS)) {
+			newRel.setRelation(GEO_REL_CONTAINS);
+			return evaluateGeoQuery(regGeoQuery, geoValue);
+		} else if (origRelString.equals(GEO_REL_DISJOINT)) {
+			newRel.setRelation(GEO_REL_WITHIN);
+			return !evaluateGeoQuery(regGeoQuery, geoValue);
+		} else if (origRelString.equals(GEO_REL_OVERLAPS)) {
+			newRel.setRelation(GEO_REL_CONTAINS);
+			if (evaluateGeoQuery(regGeoQuery, geoValue)) {
+				return true;
+			}
+			newRel.setRelation(GEO_REL_WITHIN);
+			return evaluateGeoQuery(regGeoQuery, geoValue);
+
+		}
+
+		return false;
+
+	}
+
+	// TODO this is potentially slow as hell so figure out a better way to do
+	// this!!!
+	public boolean checkSubscriptions(CSourceRegistration prevCSourceRegistration,
+			CSourceRegistration newCSourceRegistration) {
+		new Thread() {
+			@Override
+			public void run() {
+				HashMap oldNotification = new HashMap();
+				HashMap newNotification = new HashMap();
+
+				HashSet prevSubsToCheck = new HashSet();
+				for (Information info : prevCSourceRegistration.getInformation()) {
+					for (EntityInfo entityInfo : info.getEntities()) {
+
+						if (entityInfo.getId() != null) {
+							prevSubsToCheck.addAll(idBasedSubscriptions.get(entityInfo.getId().toString()));
+						}
+						prevSubsToCheck.addAll(typeBasedSubscriptions.get(entityInfo.getType()));
+						prevSubsToCheck.addAll(getPatternBasedSubs(entityInfo.getId().toString()));
+						if (entityInfo.getIdPattern() != null) {
+							prevSubsToCheck.addAll(getSubsForIdPattern(entityInfo.getIdPattern()));
+						}
+
+					}
+
+				}
+
+				HashSet newSubsToCheck = new HashSet();
+				for (Information info : newCSourceRegistration.getInformation()) {
+					for (EntityInfo entityInfo : info.getEntities()) {
+
+						if (entityInfo.getId() != null) {
+							newSubsToCheck.addAll(idBasedSubscriptions.get(entityInfo.getId().toString()));
+						}
+						newSubsToCheck.addAll(typeBasedSubscriptions.get(entityInfo.getType()));
+						newSubsToCheck.addAll(getPatternBasedSubs(entityInfo.getId().toString()));
+						if (entityInfo.getIdPattern() != null) {
+							newSubsToCheck.addAll(getSubsForIdPattern(entityInfo.getIdPattern()));
+						}
+
+					}
+
+				}
+
+				for (Subscription sub : prevSubsToCheck) {
+
+					CSourceNotification notifyEntry = generateNotificationEntry(prevCSourceRegistration, sub, null);
+					if (notifyEntry != null) {
+						oldNotification.put(sub, notifyEntry);
+					}
+				}
+				for (Subscription sub : newSubsToCheck) {
+
+					CSourceNotification notifyEntry = generateNotificationEntry(newCSourceRegistration, sub, null);
+					if (notifyEntry != null) {
+						newNotification.put(sub, notifyEntry);
+					}
+				}
+				TriggerReason trigger;
+				for (Entry entry : newNotification.entrySet()) {
+					Subscription sub = entry.getKey();
+					if (oldNotification.containsKey(sub)) {
+						if (oldNotification.get(sub).hashCode() == entry.getValue().hashCode()) {
+							// no changes for sub -> no notification
+							continue;
+						}
+						// updated 
notification + trigger = TriggerReason.updated; + } else { + // new notification + trigger = TriggerReason.newlyMatching; + } + entry.getValue().setTriggerReason(trigger); + new Thread() { + @Override + public void run() { + if (sub.isInternal()) { + internalNotificationHandler.notify(entry.getValue(), sub); + } else { + notificationHandler.notify(entry.getValue(), sub); + } + + } + }.start(); + } + + for (Entry entry : oldNotification.entrySet()) { + if (!newNotification.containsKey(entry.getKey())) { + // deleted notification + CSourceNotification deleteNotification = new CSourceNotification(entry.getValue().getId(), + entry.getValue().getSubscriptionId(), new Date(System.currentTimeMillis()), + TriggerReason.noLongerMatching, null, null, null, 0, true); + new Thread() { + @Override + public void run() { + if (entry.getKey().isInternal()) { + internalNotificationHandler.notify(deleteNotification, entry.getKey()); + } else { + notificationHandler.notify(deleteNotification, entry.getKey()); + } + + } + }.start(); + } + } + } + }.start(); + return true; + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/main/resources/.application.properties.swp b/scorpio-broker/Registry/RegistryManager/src/main/resources/.application.properties.swp new file mode 100644 index 0000000000000000000000000000000000000000..bb05be940c596c19df54b7929ceab8c99d0b69f3 Binary files /dev/null and b/scorpio-broker/Registry/RegistryManager/src/main/resources/.application.properties.swp differ diff --git a/scorpio-broker/Registry/RegistryManager/src/main/resources/application-aaio.yml b/scorpio-broker/Registry/RegistryManager/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..c4c4eec6f923624de4e8651e66e9f712e999324a --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/resources/application-aaio.yml @@ -0,0 +1,41 @@ +server: + port: 1030 + +broker: + parent: + location: + url: SELF + id: FedBroker1 + # geoCoverage: {\\"type\\":\\"Polygon\\",\\"coordinates\\": [ [ [ 8.28643798828125, 49.60715036117516 ], [ 8.399047851562498, 49.60715036117516 ], [ 8.399047851562498, 49.664961282899974 ], [ 8.28643798828125, 49.664961282899974 ], [ 8.28643798828125, 49.60715036117516 ] ] ] } + +bootstrap: + servers: kafka:9092 + +atcontext: + url: http://gateway:9090/ngsi-ld/contextes/ + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +spring: + kafka: + consumer: + bootstrap-servers: kafka:9092 + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/Registry/RegistryManager/src/main/resources/application-aio.yml b/scorpio-broker/Registry/RegistryManager/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..09f8deaecc5c5acf1fe5def12106678698c6dfd2 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/resources/application-aio.yml @@ -0,0 +1,41 @@ +server: + port: 1030 + +broker: + parent: + location: + url: SELF + id: FedBroker1 + # geoCoverage: {\\"type\\":\\"Polygon\\",\\"coordinates\\": [ [ [ 8.28643798828125, 49.60715036117516 ], [ 8.399047851562498, 49.60715036117516 ], [ 8.399047851562498, 49.664961282899974 ], [ 8.28643798828125, 
49.664961282899974 ], [ 8.28643798828125, 49.60715036117516 ] ] ] } + +bootstrap: + servers: localhost:9092 + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +spring: + kafka: + consumer: + bootstrap-servers: localhost:9092 + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + datasource: + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/Registry/RegistryManager/src/main/resources/application-dist.yml b/scorpio-broker/Registry/RegistryManager/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..ffebb1573110b245bc607d4cb187b9361d66780e --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/resources/application-dist.yml @@ -0,0 +1,41 @@ +server: + port: 1030 + +broker: + parent: + location: + url: SELF + id: FedBroker1 + # geoCoverage: {\\"type\\":\\"Polygon\\",\\"coordinates\\": [ [ [ 8.28643798828125, 49.60715036117516 ], [ 8.399047851562498, 49.60715036117516 ], [ 8.399047851562498, 49.664961282899974 ], [ 8.28643798828125, 49.664961282899974 ], [ 8.28643798828125, 49.60715036117516 ] ] ] } + +bootstrap: + servers: kafka:9092 + +atcontext: + url: http://gateway:9090/ngsi-ld/contextes/ + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ + +spring: + kafka: + consumer: + bootstrap-servers: kafka:9092 + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_registrymanager + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/Registry/RegistryManager/src/main/resources/application.yml b/scorpio-broker/Registry/RegistryManager/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..be8bda092cbad42f8415e681039b5b699c773167 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/resources/application.yml @@ -0,0 +1,118 @@ + +server: + port: 1030 +spring: + application: + name: c-sources + main: + lazy-initialization: true + kafka: + admin: + properties: + cleanup: + policy: compact + flyway: + baselineOnMigrate: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_registrymanager" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + CSOURCE_REGISTRATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE + contentType: application/json + CSOURCE_NOTIFICATION_WRITE_CHANNEL: + destination: CONTEXT_SOURCE_NOTIFICATION + contentType: application/json +# CSOURCE_REGISTRATION_READ_CHANNEL: +# destination: CONTEXT_SOURCE +# contentType: application/json +# group: csource +# CONTEXT_REGISTRY_READ_CHANNEL: +# destination: CONTEXT_REGISTRY +# contentType: application/json +# group: csourceregistry +# CONTEXT_UPDATE_READ_CHANNEL: +# destination: CONTEXT_UPDATE +# contentType: application/json +# group: csourceupdate + ATCONTEXT_WRITE_CHANNEL: + 
destination: ATCONTEXT + contentType: application/json + CSOURCE_SUBSCRIPTION_WRITE_CHANNEL: + destination: CSOURCE_SUBSCRIPTIONS + contentType: application/json + +#C-Source topics properties +csource: + registry: + topic: CONTEXT_REGISTRY + source: + topic: CONTEXT_SOURCE + notification: + topic: CONTEXT_SOURCE_NOTIFICATION + query: + topic: CONTEXT_REGISTRY_QUERY + result: + topic: CONTEXT_REGISTRY_QUERY_RESULT + +submanager: + subscription: + topic: SUBSCRIPTIONS + + +#kafka broker path +bootstrap: + servers: localhost:9092 + +#enable log compaction + + +# (default) spring.datasource.type=com.zaxxer.hikari.HikariDataSource + + + + +management: + endpoints: + web: + exposure: + include: "*" + endpoint: + restart: + enabled: true + +atcontext: + url: http://localhost:9090/ngsi-ld/contextes/ + + +broker: + id: FedBroker1 + parent: + location: + url: SELF + + + + +#broker.customEndpoint= +#broker.geoCoverage={\\"type\\":\\"Polygon\\",\\"coordinates\\": [[[100.0, 0.0],[101.0, 0.0],[101.0, 1.0],[100.0, 1.0],[100.0, 0.0] ] ]} +#broker.geoCoverage={\\"type\\":\\"Polygon\\",\\"coordinates\\": [ [ [ 8.34686279296875, 49.298262740098345 ], [ 8.850860595703125, 49.298262740098345 ], [ 8.850860595703125, 49.55283460376055 ], [ 8.34686279296875, 49.55283460376055 ], [ 8.34686279296875, 49.298262740098345 ] ] ] } +#broker.geoCoverage={\\"type\\":\\"Polygon\\",\\"coordinates\\": [ [ [ 8.28643798828125, 49.60715036117516 ], [ 8.399047851562498, 49.60715036117516 ], [ 8.399047851562498, 49.664961282899974 ], [ 8.28643798828125, 49.664961282899974 ], [ 8.28643798828125, 49.60715036117516 ] ] ] } + +#SELF in case of Fedaration broker + +#broker.parent.location.url=http://10.0.4.33:9090/ngsi-ld/v1/csourceRegistrations/ +#broker.parent.location.url=http://cema.nlehd.de:2095/ngsi-ld/v1/csourceRegistrations/ \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/src/main/resources/log4j2-spring.xml b/scorpio-broker/Registry/RegistryManager/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..97aec844eaffd3f2f94f2b846a8a0654fcc9b6ca --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/RegistryHandlerTest.java b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/RegistryHandlerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..807776dcca610cdb86f740bdc7c181d7923bf1e0 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/RegistryHandlerTest.java @@ -0,0 +1,14 @@ +package eu.neclab.ngsildbroker.registryhandler; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +public class RegistryHandlerTest { + @Test + public void contextLoads() { + } +} diff --git a/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.java 
b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..5df74abd2a585540b516e550156c8610517e667c --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.java @@ -0,0 +1,123 @@ +package eu.neclab.ngsildbroker.registryhandler.controller; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*; +import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.net.URI; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.registryhandler.service.CSourceService; + +@SpringBootTest(properties = { "spring.main.allow-bean-definition-overriding=true" }) +@RunWith(SpringRunner.class) +//@WebMvcTest +@AutoConfigureMockMvc(secure = false) +public class RegistryControllerTest { + + @Autowired + private MockMvc mockMvc; + @MockBean + private CSourceService csourceService; + + private String payload; + private String updatePayload; + private String CORE_CONTEXT_URL_STR = "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"; + + @Before + public void setup() { + // @formatter:off + payload = "{\r\n" + " \"id\": \"urn:ngsi-ld:ContextSourceRegistration:csr1a3458\",\r\n" + + " \"type\": \"ContextSourceRegistration\",\r\n" + " \"name\": \"NameExample\",\r\n" + + " \"description\": \"DescriptionExample\",\r\n" + " \"information\": [{\r\n" + + " \"entities\": [{\r\n" + " \"id\": \"urn:ngsi-ld:Vehicle:A456\",\r\n" + + " \"type\": \"Vehicle\"\r\n" + " }],\r\n" + " \"properties\": [\"brandName\",\r\n" + + " \"speed\"],\r\n" + " \"relationships\": [\"isParked\"]\r\n" + " },\r\n" + " {\r\n" + + " \"entities\": [{\r\n" + " \"idPattern\": \".*downtown$\",\r\n" + + " \"type\": \"OffStreetParking\"\r\n" + " }]\r\n" + " }],\r\n" + + " \"endpoint\": \"http://my.csource.org:1026\",\r\n" + + " \"location\": \"{ \\\"type\\\": \\\"Polygon\\\", \\\"coordinates\\\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }\",\r\n" + + " \"timestamp\": {\r\n" + " \"start\": \"2017-11-29T14:53:15Z\"\r\n" + " },\r\n" + 
+ " \"expires\": \"2030-11-29T14:53:15Z\",\r\n" + + " \"@context\": [ \""+CORE_CONTEXT_URL_STR+"\",\r\n" + + " {\r\n" + " \"Vehicle\": \"http://example.org/vehicle/Vehicle\",\r\n" + + " \"brandName\": \"http://example.org/vehicle/brandName\",\r\n" + + " \"speed\": \"http://example.org/vehicle/speed\",\r\n" + + " \"OffStreetParking\": \"http://example.org/parking/OffStreetParking\",\r\n" + + " \"isParked\": {\r\n" + " \"@type\": \"@id\",\r\n" + + " \"@id\": \"http://example.org/common/isParked\"\r\n" + " }\r\n" + " }]\r\n" + "}"; + // @formatter:on + } + + @After + public void teardown() { + payload = null; + updatePayload = null; + } + + @Test + public void registerCSourceTest() { + try { + when(csourceService.registerCSource(any())) + .thenReturn(new URI("urn:ngsi-ld:ContextSourceRegistration:csr1a3458")); + mockMvc.perform(post("/ngsi-ld/v1/csourceRegistrations/").contentType(AppConstants.NGB_APPLICATION_JSONLD).content(payload)) + .andExpect(status().isCreated()) + .andExpect(redirectedUrl("/ngsi-ld/v1/csourceRegistrations/urn:ngsi-ld:ContextSourceRegistration:csr1a3458")) + .andDo(print()); + + verify(csourceService, times(1)).registerCSource(any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + e.printStackTrace(); + } + + } + + @Test + public void updateCSourceTest() { + try { + when(csourceService.updateCSourceRegistration("urn:ngsi-ld:ContextSourceRegistration:csr1a3458", + updatePayload)).thenReturn(true); + mockMvc.perform(patch("/ngsi-ld/v1/csourceRegistrations/{registrationId}", "urn:ngsi-ld:ContextSourceRegistration:csr1a3458") + .contentType(AppConstants.NGB_APPLICATION_JSONLD).content(payload)) + .andExpect(status().isNoContent()).andDo(print()); + + verify(csourceService, times(1)).updateCSourceRegistration(any(), any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + e.printStackTrace(); + } + } + + @Test + public void deleteCsourceTest() { + try { + when(csourceService.deleteCSourceRegistration("urn:ngsi-ld:ContextSourceRegistration:csr1a3458")) + .thenReturn(true); + mockMvc.perform(delete("/ngsi-ld/v1/csourceRegistrations/{registrationId}", "urn:ngsi-ld:ContextSourceRegistration:csr1a3458") + .contentType(AppConstants.NGB_APPLICATION_JSONLD)).andExpect(status().isNoContent()).andDo(print()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + e.printStackTrace(); + } + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistrySubscriptionControllerTest.java b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistrySubscriptionControllerTest.java new file mode 100644 index 0000000000000000000000000000000000000000..1de19533c2c3cfaae81cb45e5458288edb0ac7d8 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistrySubscriptionControllerTest.java @@ -0,0 +1,122 @@ +package eu.neclab.ngsildbroker.registryhandler.controller; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; +import static 
org.springframework.test.web.servlet.result.MockMvcResultHandlers.print; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import java.net.URI; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.web.servlet.MockMvc; + +import eu.neclab.ngsildbroker.commons.constants.AppConstants; +import eu.neclab.ngsildbroker.registryhandler.service.CSourceSubscriptionService; + +@SpringBootTest(properties = { "spring.main.allow-bean-definition-overriding=true" }) +@RunWith(SpringRunner.class) +@AutoConfigureMockMvc//(secure = false) +public class RegistrySubscriptionControllerTest { + + @Autowired + private MockMvc mockMvc; + @MockBean + CSourceSubscriptionService csourceSubsService; + + private String payload; + @SuppressWarnings("unused") + // TODO use or remove + private String updatePayload; + + @Before + public void setup() { + + // @formatter:off + + payload = "{\r\n" + " \"id\": \"urn:ngsi-ld:Subscription:7\",\r\n" + " \"type\": \"Subscription\",\r\n" + + " \"entities\": [{\r\n" + " \"type\": \"Vehicle\"\r\n" + " }],\r\n" + + " \"watchedAttributes\": [\"http://example.org/vehicle/brandName\"],\r\n" + + " \"q\":\"http://example.org/vehicle/brandName!=Mercedes\",\r\n" + " \"notification\": {\r\n" + + " \"attributes\": [\"http://example.org/vehicle/brandName\"],\r\n" + + " \"format\": \"keyValues\",\r\n" + " \"endpoint\": {\r\n" + + " \"uri\": \"http://my.endpoint.org/notify\",\r\n" + " \"accept\": \"application/json\"\r\n" + + " }\r\n" + " }\r\n" + "}"; + + updatePayload = "{\r\n" + " \"id\": \"urn:ngsi-ld:Subscription:7\",\r\n" + " \"type\": \"Subscription\",\r\n" + + " \"entities\": [{\r\n" + " \"type\": \"Vehicle\"\r\n" + " }],\r\n" + + " \"watchedAttributes\": [\"http://example.org/vehicle/brandName2\"],\r\n" + + " \"q\": \"http://example.org/vehicle/brandName2!=Mercedes\",\r\n" + " \"notification\": {\r\n" + + " \"attributes\": [\"http://example.org/vehicle/brandName2\"],\r\n" + + " \"format\": \"keyValues\",\r\n" + " \"endpoint\": {\r\n" + + " \"uri\": \"http://my.endpoint.org/notify\",\r\n" + + " \"accept\": \"application/json\"\r\n" + " }\r\n" + " }\r\n" + "}"; + // @formatter:on + } + + @After + public void after() { + payload = null; + } + + @Test + public void subscribeRestTest() { + try { + when(csourceSubsService.subscribe(any())).thenReturn(new URI("urn:ngsi-ld:Subscription:7")); + mockMvc.perform(post("/ngsi-ld/v1/csourceSubscriptions/").contentType(AppConstants.NGB_APPLICATION_JSONLD) + .content(payload)).andExpect(status().isCreated()) + .andExpect(redirectedUrl("/ngsi-ld/v1/csourceSubscriptions/urn:ngsi-ld:Subscription:7")).andDo(print()); + + verify(csourceSubsService, times(1)).subscribe(any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(e.getMessage()); + + } + + } + + @Test + public void updateSubscriptionTest() { + try { + when(csourceSubsService.updateSubscription(any())).thenReturn(any()); + mockMvc.perform(patch("/ngsi-ld/v1/csourceSubscriptions/{id}", 
"urn:ngsi-ld:Subscription:7") + .contentType(AppConstants.NGB_APPLICATION_JSONLD).content(payload)) + .andExpect(status().isNoContent()).andDo(print()); + + verify(csourceSubsService, times(1)).updateSubscription(any()); + } catch (Exception e) { + e.printStackTrace(); + Assert.fail(e.getMessage()); + + } + } + + @Test + public void unsubscribeTest() { + try { + when(csourceSubsService.unsubscribe(any())).thenReturn(true); + mockMvc.perform(delete("/ngsi-ld/v1/csourceSubscriptions/{id}", "urn:ngsi-ld:Subscription:7") + .contentType(AppConstants.NGB_APPLICATION_JSONLD).content(payload)) + .andExpect(status().isNoContent()).andDo(print()); + verify(csourceSubsService, times(1)).unsubscribe(any()); + } catch (Exception e) { + Assert.fail(e.getMessage()); + e.printStackTrace(); + } + } +} diff --git a/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.java b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..78aa7deee612df2ea1154fb5444a490697f3d917 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.java @@ -0,0 +1,176 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import static org.mockito.ArgumentMatchers.any; + +import java.io.IOException; +import java.net.URI; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.test.context.junit4.SpringRunner; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +import eu.neclab.ngsildbroker.commons.datatypes.CSourceRegistration; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.registryhandler.config.CSourceProducerChannel; + +@SpringBootTest +@RunWith(SpringRunner.class) +public class CSourceServiceTest { + + + + @Mock + KafkaOps operations; + @Mock + CSourceProducerChannel producerChannels; + @InjectMocks + CSourceService csourceService; + @MockBean + ObjectMapper objectMapper; + @Mock + CSourceSubscriptionService csourceSubService; + + + private CSourceRegistration csourceReg; + private CSourceRegistration updateCSourceReg; + String payload; + String updatePayload; + + JsonNode blankNode; + JsonNode payloadNode; + + @Before + public void setup() throws IOException { + MockitoAnnotations.initMocks(this); + ObjectMapper objectMapper = new ObjectMapper(); + //@formatter:off + payload="{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/endpoint\": [{\r\n" + + " \"@value\": \"http://my.csource.org:1026\"\r\n" + + " }],\r\n" + + " \"@id\": \"urn:ngsi-ld:ContextSourceRegistration:csr1a3456\",\r\n" + + " \"https://uri.etsi.org/ngsi-ld/information\": [{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/entities\": [{\r\n" + + " \"@id\": \"urn:ngsi-ld:Vehicle:A456\",\r\n" + + " \"@type\": [\"https://json-ld.org/playground/Vehicle\"]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/properties\": [{\r\n" + + " \"@id\": 
\"https://json-ld.org/playground/brandName\"\r\n" + + " },\r\n" + + " {\r\n" + + " \"@id\": \"https://json-ld.org/playground/speed\"\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/relationships\": [{\r\n" + + " \"@id\": \"https://json-ld.org/playground/isParked\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/location\": [{\r\n" + + " \"@value\": \"{ \\\"type\\\":\\\"Polygon\\\", \\\"coordinates\\\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],[100.0, 1.0], [100.0, 0.0] ] ]}\"\r\n" + + " }],\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/ContextSourceRegistration\"]\r\n" + + "}"; + + updatePayload="{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/endpoint\": [{\r\n" + + " \"@value\": \"http://my.csource.org:1026\"\r\n" + + " }],\r\n" + + " \"@id\": \"urn:ngsi-ld:ContextSourceRegistration:csr1a3456\",\r\n" + + " \"https://uri.etsi.org/ngsi-ld/information\": [{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/entities\": [{\r\n" + + " \"@id\": \"urn:ngsi-ld:Vehicle:A456\",\r\n" + + " \"@type\": [\"https://json-ld.org/playground/Vehicle\"]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/properties\": [{\r\n" + + " \"@id\": \"https://json-ld.org/playground/brandName\"\r\n" + + " },\r\n" + + " {\r\n" + + " \"@id\": \"https://json-ld.org/playground/speed\"\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/relationships\": [{\r\n" + + " \"@id\": \"https://json-ld.org/playground/isParked\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/location\": [{\r\n" + + " \"@value\": \"{ \\\"type\\\":\\\"Polygon\\\", \\\"coordinates\\\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0],[100.0, 1.0], [100.0, 0.0] ] ]}\"\r\n" + + " }],\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/ContextSourceRegistration\"]\r\n" + + "}"; + //@formatter:on + csourceReg = DataSerializer.getCSourceRegistration(payload); + updateCSourceReg = DataSerializer.getCSourceRegistration(updatePayload); + + payloadNode = objectMapper.readTree(payload.getBytes()); + } + + @After + public void teardown() { + payload = null; + updatePayload = null; + csourceReg = null; + updateCSourceReg = null; + blankNode = null; + } + + @Test + public void registerCSourceTest(){ + try { + csourceReg.setInternal(true); + Mockito.doReturn(false).when(operations).isMessageExists(any(), any()); + URI uri = csourceService.registerCSource(csourceReg); + Assert.assertEquals(uri, new URI("urn:ngsi-ld:ContextSourceRegistration:csr1a3456")); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void updateCSourceTest() { + try { + byte[] payloadBytes = payload.getBytes(); + + updateCSourceReg.setInternal(false); + Mockito.doReturn(payloadBytes).when(operations).getMessage(any(), any()); + Mockito.doReturn(blankNode).when(objectMapper).createObjectNode(); + Mockito.doReturn(payloadNode).when(objectMapper).readTree(any(byte[].class)); + Mockito.doReturn(true).when(csourceSubService).checkSubscriptions(any(CSourceRegistration.class), + any(CSourceRegistration.class)); + + boolean result = csourceService.updateCSourceRegistration("urn:ngsi-ld:ContextSourceRegistration:csr1a3456", + updatePayload); + + Assert.assertTrue(result); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void deleteCSorceTest() throws Exception { + try { + byte[] payloadBytes = payload.getBytes(); + Mockito.doReturn(payloadBytes).when(operations).getMessage(any(), any()); + Mockito.doReturn(csourceReg).when(objectMapper).readValue(any(byte[].class), + 
Mockito.eq(CSourceRegistration.class)); + + boolean result = csourceService.deleteCSourceRegistration("urn:ngsi-ld:ContextSourceRegistration:csr1a3456"); + + Assert.assertTrue(result); + }catch(Exception ex) { + Assert.fail(); + } + } + +} diff --git a/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceSubscriptionServiceTest.java b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceSubscriptionServiceTest.java new file mode 100644 index 0000000000000000000000000000000000000000..d858d0d34f18ca1c2645142b19a594454e255c80 --- /dev/null +++ b/scorpio-broker/Registry/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceSubscriptionServiceTest.java @@ -0,0 +1,122 @@ +package eu.neclab.ngsildbroker.registryhandler.service; + +import static org.mockito.ArgumentMatchers.any; + +import java.net.URI; +import java.util.ArrayList; +import java.util.List; + +import org.junit.After; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +import eu.neclab.ngsildbroker.commons.datatypes.Subscription; +import eu.neclab.ngsildbroker.commons.datatypes.SubscriptionRequest; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.stream.service.KafkaOps; +import eu.neclab.ngsildbroker.registryhandler.config.CSourceProducerChannel; + +@SpringBootTest +@RunWith(SpringRunner.class) +public class CSourceSubscriptionServiceTest { + + @Mock + KafkaOps operations; + @Mock + CSourceProducerChannel producerChannels; + @InjectMocks + CSourceSubscriptionService csourceSubs; + + String subsPayload; + Subscription subs; + + @Before + public void setup() { + MockitoAnnotations.initMocks(this); + subsPayload="{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/entities\": [{\r\n" + + " \"@type\": [\"http://example.org/vehicle/Vehicle\"]\r\n" + + " }],\r\n" + + " \"@id\": \"urn:ngsi-ld:Subscription:7\",\r\n" + + " \"https://uri.etsi.org/ngsi-ld/notification\": [{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/attributes\": [{\r\n" + + " \"@id\": \"http://example.org/vehicle/brandName\"\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/endpoint\": [{\r\n" + + " \"https://uri.etsi.org/ngsi-ld/accept\": [{\r\n" + + " \"@value\": \"application/json\"\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/uri\": [{\r\n" + + " \"@value\": \"http://my.endpoint.org/notify\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/format\": [{\r\n" + + " \"@value\": \"keyValues\"\r\n" + + " }]\r\n" + + " }],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/q\": [{\r\n" + + " \"@value\": \"http://example.org/vehicle/brandName!=Mercedes\"\r\n" + + " }],\r\n" + + " \"@type\": [\"https://uri.etsi.org/ngsi-ld/Subscription\"],\r\n" + + " \"https://uri.etsi.org/ngsi-ld/watchedAttributes\": [{\r\n" + + " \"@id\": \"http://example.org/vehicle/brandName\"\r\n" + + " }]\r\n" + + "}"; + + subs=DataSerializer.getSubscription(subsPayload); + } + + @After + public void teardown() { + subsPayload=null; + subs=null; + } + + @Test + public void subscribeTest() { + try { + Mockito.doReturn(true).when(operations).isMessageExists(any(), any()); + URI 
uri=csourceSubs.subscribe(new SubscriptionRequest(subs, null)); + + Assert.assertEquals(uri, new URI("urn:ngsi-ld:Subscription:7")); + }catch(Exception ex) { + Assert.fail(); + } + } + + + @Test + public void unSubscribeTest() throws Exception { + try { + csourceSubs.subscribe(new SubscriptionRequest(subs, null)); + Assert.assertTrue(csourceSubs.unsubscribe(new URI("urn:ngsi-ld:Subscription:7"))); + }catch(Exception ex) { + Assert.fail(); + } + } + + @Test + public void updateSubTest() throws Exception { + try { + csourceSubs.subscribe(new SubscriptionRequest(subs, null)); + Subscription newSub=subs; + List watchedAttrib=new ArrayList<>(); + watchedAttrib.add("http://example.org/vehicle/brandName2"); + newSub.setAttributeNames(watchedAttrib); + + Subscription updatedSub=csourceSubs.updateSubscription(newSub); + + Assert.assertEquals("http://example.org/vehicle/brandName2", updatedSub.getAttributeNames().get(0)); + }catch(Exception ex) { + Assert.fail(); + } + } +} diff --git a/scorpio-broker/Registry/pom.xml b/scorpio-broker/Registry/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..0d28133ff6dfd5879c7cc72869c1ac1568350aa2 --- /dev/null +++ b/scorpio-broker/Registry/pom.xml @@ -0,0 +1,16 @@ + + 4.0.0 + Registry + pom + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../BrokerParent + + 1.0.0-SNAPSHOT + + RegistryManager + + diff --git a/scorpio-broker/RegistryManager/target/classes/.application.properties.swp b/scorpio-broker/RegistryManager/target/classes/.application.properties.swp new file mode 100644 index 0000000000000000000000000000000000000000..bb05be940c596c19df54b7929ceab8c99d0b69f3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/classes/.application.properties.swp differ diff --git a/scorpio-broker/RegistryManager/target/classes/META-INF/jandex.idx b/scorpio-broker/RegistryManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..ec067ad6fc71f9642d3f33964408ef1bc71c8a2c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/RegistryManager/target/classes/application-activemq.properties b/scorpio-broker/RegistryManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..2ba498af8f4f1660cdf0b7410bb4a2297cbd521c --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/application-activemq.properties @@ -0,0 +1,13 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 + +mp.messaging.outgoing.registry.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.outgoing.registry.connector=smallrye-camel diff --git a/scorpio-broker/RegistryManager/target/classes/application-kafka.properties b/scorpio-broker/RegistryManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..9ed6c726026aef08678bab5ccd51fc7615574dc3 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/application-kafka.properties @@ -0,0 +1,12 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} 
+mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#Kafka settings +scorpio.messaging.maxSize=1048576 +kafka.bootstrap.servers=${bootstrap.servers} + +mp.messaging.outgoing.registry.topic=${scorpio.topics.registry} +mp.messaging.outgoing.registry.connector=smallrye-kafka +#readability block########### + + diff --git a/scorpio-broker/RegistryManager/target/classes/application-mqtt.properties b/scorpio-broker/RegistryManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..f47b37c1961b13d25b70585a0e27ac1edfb076f4 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/application-mqtt.properties @@ -0,0 +1,13 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +mp.messaging.outgoing.registry.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.outgoing.registry.connector=smallrye-camel + + diff --git a/scorpio-broker/RegistryManager/target/classes/application-rabbitmq.properties b/scorpio-broker/RegistryManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..e32b2f078c0f0f7e2027c54c9ba11144cb8e92ab --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,14 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +mp.messaging.outgoing.registry.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.outgoing.registry.connector=smallrye-camel +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + diff --git a/scorpio-broker/RegistryManager/target/classes/application-sqs.properties b/scorpio-broker/RegistryManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..5a880061c67bd35357020b2fe85b159348ffc6fa --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/application-sqs.properties @@ -0,0 +1,13 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#camel.component.aws2-sqs.maximum-message-size=10485760 +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.messageconnection.protocol=sns-fanout +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +mp.messaging.outgoing.registry.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.outgoing.registry.connector=smallrye-camel + + diff --git 
a/scorpio-broker/RegistryManager/target/classes/application.properties b/scorpio-broker/RegistryManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..03554f4abac068a8c97a243b26c2adc444722852 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/application.properties @@ -0,0 +1,59 @@ +quarkus.application.name=registry-manager +quarkus.http.port=1030 +quarkus.log.level=INFO +quarkus.ssl.native=true +#quarkus.log.category."eu.neclab".level=DEBUG +#quarkus.vertx.event-loops-pool-size=32 +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +scorpio.at-context-server=http://at-context-server:2023 +atcontext.url=${scorpio.at-context-server}/ngsi-ld/v1/jsonldContexts/ +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.registry.max-limit=1000 +scorpio.registry.default-limit=50 +scorpio.registry.autorecording=active +scorpio.registry.batch-operations.create.max=1000 +scorpio.registry.batch-operations.upsert.max=1000 +scorpio.registry.batch-operations.update.max=1000 +scorpio.registry.batch-operations.delete.max=1000 +scorpio.registry.batch-operations.query.max=1000 +scorpio.topics.entity=ENTITY +scorpio.topics.registry=REGISTRY +scorpio.topics.internalnotification=I_NOTIFY +scorpio.topics.internalregsub=I_REGSUB +scorpio.startupdelay=5s +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld + +scorpio.federation[0]=name0 +scorpio.federation[0].url=url0 +scorpio.federation[0].sourcetenant=sourcetenant0 +scorpio.federation[0].targettenant=targettenant0 +scorpio.federation[0].regtype=regtype0 +scorpio.federation[1]=name1 +scorpio.federation[1].url=url1 +scorpio.federation[1].sourcetenant=sourcetenant1 +scorpio.federation[1].targettenant=targettenant1 +scorpio.federation[1].regtype=regtype1 +scorpio.fedupdaterate=600s \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF 
NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System
defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count 
count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + 
createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git 
a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = 
(NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only 
= (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + 
NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20211217.1__subscription_table.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 
0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes,'{-1}', (i#>'{@value}')) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
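+		-- This redefinition matches the earlier entity extraction function except for the scope
+		-- handling added at its end, which fills the new scopes text[] column via the getScopes()
+		-- function defined above in this migration. Note that this version of getScopes() calls
+		-- array_append() with three arguments, while PostgreSQL's array_append() takes two; the
+		-- later migration V20220204.2__bugfix_getScopes.sql replaces that call. For illustration,
+		-- a hypothetical scope filter on the extracted column could read:
+		--   SELECT id FROM entity WHERE '/Madrid' = ANY(scopes);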
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = 
COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
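+			-- Compared with the earlier information-table extractor, the statements below read the
+			-- https://uri.etsi.org/ngsi-ld/propertyNames and .../relationshipNames members (rather
+			-- than .../properties and .../relationships), matching the expanded terms counted by
+			-- this migration's csource_extract_jsonb_fields() above.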
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
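+			-- Rows created below share one group_id per registrationInfo element, so a hypothetical
+			-- join (for illustration only) such as
+			--   SELECT e.csource_id FROM csourceinformation e
+			--   JOIN csourceinformation a ON a.group_id = e.group_id
+			--   WHERE e.entity_type = 'urn:example:Vehicle' AND a.property_id = 'urn:example:speed';
+			-- can recover which entities and attributes were registered together.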
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 
0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + e_type text, + 
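+	-- The columns below flatten each registration entry further: matched attribute names
+	-- (e_prop / e_rel), the registration's location, scopes, expiry, endpoint, tenant and
+	-- custom headers, the registration mode, and one boolean per supported NGSI-LD operation
+	-- or capability, so the registry can answer "which context source supports operation X
+	-- for entity type Y" from this table alone.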
e_prop text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = 
(NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
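+ -- indices 1..38 of this boolean array map, in order, onto the operation flag columns (createEntity ... canCompress) inserted into csourceinformation by CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS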
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR 
REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE 
PUBLIC.temporalentityattrinstance ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + 
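+ -- keep an untouched copy of the stored entity; MERGE_JSON writes the patched document back to the entity table and returns this previous state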
previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + 
"@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + 
"scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = 
true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT 
EXISTS i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": 
"ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": 
"ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20230904.1__fixsubs.sql @@ -0,0 +1 
@@ +UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', entityId, 'old', 
prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + 
"ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": 
"ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": 
"ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + 
"VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = 
newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
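+ -- fallback: registrations expanded against the default context carry 'mode' under the
+ -- default-context IRI rather than the core ngsi-ld IRI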
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
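+ -- exclusive/redirect registrations (regMode > 1) must not overlap non-Relationship attributes
+ -- that already exist on locally stored entities of the registered type; a conflict raises 23514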
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
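+ -- registration entries without an entities block: the relationship conflict check scans all
+ -- local entities carrying the attribute (still only enforced for regMode > 1)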
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": 
[{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": "ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", 
+ + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + "@id": "ngsi-ld:hasLanguageMaps", + + "@container": 
"@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": "ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + 
+ "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value -> '@list') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.class b/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.class new file mode 100644 index 0000000000000000000000000000000000000000..5a0b1102832ca5c4e9c9104e89d402789ae8a658 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.class differ diff --git 
a/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.class b/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.class new file mode 100644 index 0000000000000000000000000000000000000000..4227a0fe603d6342d47b8a04b1a1afa79386b45a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.class differ diff --git a/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.class b/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.class new file mode 100644 index 0000000000000000000000000000000000000000..fb5649948a95688ea81bbbae5e520f0d19ae789b Binary files /dev/null and b/scorpio-broker/RegistryManager/target/classes/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.class differ diff --git a/scorpio-broker/RegistryManager/target/maven-archiver/pom.properties b/scorpio-broker/RegistryManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..da93f68e3bd892419e0630ca90818da6e8252ca5 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:27:53 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=registry-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..2c7ee6b6b38d0fc6acd254bee968a109c1fb8066 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,3 @@ +eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.class +eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.class +eu/neclab/ngsildbroker/registryhandler/service/CSourceService.class diff --git a/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..d5c44ed086f841680f0f127b48b607cf0308d22f --- /dev/null +++ b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,3 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/repository/CSourceDAO.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistryManager/src/main/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceService.java diff --git a/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..24a3dc0bb1f3072b9bf16084658d199a2d756c21 --- /dev/null +++ 
b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,3 @@ +eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.class +eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.class +eu/neclab/ngsildbroker/registryhandler/controller/CustomProfile.class diff --git a/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..fe3549b603e242125071b2e766df7f7b03120c11 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,3 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistryManager/src/test/java/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.java diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/app/registry-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/RegistryManager/target/quarkus-app/app/registry-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c060ebbae5c55c8277f1f9dc3c22ae2ef261be0 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/app/registry-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ 
diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff 
--git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 
0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 
0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 
0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar 
new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git 
a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and 
b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final 
+io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 
+io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 
+io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final 
+org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..ab42b8f85175cd2351788e7af56569a3b816e367 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..cf5f5ea176b947feb3ace39a4497b5c10dd89047 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..9fc3e4dcb09347eb10a374df5d10ef1d71a18dca Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..b6ee9171140fff36b1e1c34a8eb146c44916943b Binary files /dev/null and b/scorpio-broker/RegistryManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/RegistryManager/target/quarkus-artifact.properties b/scorpio-broker/RegistryManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..00195d168ac768606b0b88968bbefbf7ef8b89c5 --- /dev/null +++ b/scorpio-broker/RegistryManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/registry-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/RegistryManager/target/registry-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/RegistryManager/target/registry-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..393d2864d96284efdc59d41b5392679690738143 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/registry-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/controller/CustomProfile.class b/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..a9060c5582ac95fc163289aabe3bff4a5e4f13a9 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/controller/CustomProfile.class differ diff 
--git a/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.class b/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..719a4265a18b211277dc4ec6b631f6800697edff Binary files /dev/null and b/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/controller/RegistryControllerTest.class differ diff --git a/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.class b/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.class new file mode 100644 index 0000000000000000000000000000000000000000..e48d819727c4f9b85cc8fa032a5f300ece9fda81 Binary files /dev/null and b/scorpio-broker/RegistryManager/target/test-classes/eu/neclab/ngsildbroker/registryhandler/service/CSourceServiceTest.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/META-INF/jandex.idx b/scorpio-broker/RegistrySubscriptionManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..d00da3dfcb36b836716191a7946694f616f55b28 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/application-activemq.properties b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..b4b8d83eb2ff5ef7c5f7d869df3b797b39da0f1f --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-activemq.properties @@ -0,0 +1,21 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### + + +#readability block########### + + + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/application-kafka.properties b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..758c985fc6406b762e7c2409f5282ebe8a295dd3 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-kafka.properties @@ -0,0 +1,32 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#Kafka settings +scorpio.messaging.maxSize=1048576 +kafka.bootstrap.servers=${bootstrap.servers} +mp.messaging.incoming.registryretrieve.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.topic=${scorpio.topics.registry} 
+mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-kafka +mp.messaging.outgoing.subalive.topic=${scorpio.topics.subalive} +mp.messaging.outgoing.subalive.broadcast=true +#mp.messaging.outgoing.subalive.value.serializer=eu.neclab.ngsildbroker.commons.serialization.messaging.AliveAnnouncementSerializer +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-kafka +mp.messaging.incoming.subaliveretrieve.topic=${scorpio.topics.subalive} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +mp.messaging.incoming.subaliveretrieve.broadcast=true +#mp.messaging.incoming.subaliveretrieve.value.deserializer=eu.neclab.ngsildbroker.commons.serialization.messaging.AliveAnnouncementDeserializer +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-kafka +mp.messaging.outgoing.subsync.topic=${scorpio.topics.subsync} +mp.messaging.outgoing.subsync.broadcast=true +#mp.messaging.outgoing.subsync.value.serializer=eu.neclab.ngsildbroker.commons.serialization.messaging.SyncMessageSerializer +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-kafka +mp.messaging.incoming.subsyncretrieve.topic=${scorpio.topics.subsync} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest +mp.messaging.incoming.subsyncretrieve.broadcast=true + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/application-mqtt.properties b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..ffc1736376154651fe9394627a560147e8556022 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-mqtt.properties @@ -0,0 +1,36 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### + +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-kafka +mp.messaging.outgoing.subalive.topic=${scorpio.topics.subalive} +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-kafka +mp.messaging.incoming.subaliveretrieve.topic=${scorpio.topics.subalive} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +mp.messaging.incoming.subaliveretrieve.broadcast=true + +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-kafka +mp.messaging.outgoing.subsync.topic=${scorpio.topics.subsync} +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-kafka +mp.messaging.incoming.subsyncretrieve.topic=${scorpio.topics.subsync} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest 
+mp.messaging.incoming.subsyncretrieve.broadcast=true +#readability block########### + + + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/application-rabbitmq.properties b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..2cdd84b9d4c92152d2b20bcd61f6eb3527bb3684 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,38 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### + +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-kafka +mp.messaging.outgoing.subalive.topic=${scorpio.topics.subalive} +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-kafka +mp.messaging.incoming.subaliveretrieve.topic=${scorpio.topics.subalive} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +mp.messaging.incoming.subaliveretrieve.broadcast=true + +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-kafka +mp.messaging.outgoing.subsync.topic=${scorpio.topics.subsync} +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-kafka +mp.messaging.incoming.subsyncretrieve.topic=${scorpio.topics.subsync} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest +mp.messaging.incoming.subsyncretrieve.broadcast=true +#readability block########### + + +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/application-sqs.properties b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..8797890047c9fcadb6c06fdef7eccfcf645e7326 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/application-sqs.properties @@ -0,0 +1,20 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#camel.component.aws2-sqs.maximum-message-size=10485760 +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.messageconnection.protocol=sns-fanout + +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true 
+#readability block########### + + + + + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/application.properties b/scorpio-broker/RegistrySubscriptionManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..caca6618081a283addaeb409e052c1a4d35f6af9 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/application.properties @@ -0,0 +1,41 @@ +quarkus.application.name=registry-subscription-manager +quarkus.http.port=2025 +quarkus.log.level=INFO +quarkus.ssl.native=true +#quarkus.log.category."eu.neclab".level=DEBUG +#quarkus.vertx.event-loops-pool-size=32 +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +scorpio.at-context-server=http://at-context-server:2023 +atcontext.url=${scorpio.at-context-server}/ngsi-ld/v1/jsonldContexts/ +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.topics.registry=REGISTRY +scorpio.topics.internalnotification=I_NOTIFY +scorpio.topics.internalregsub=I_REGSUB +scorpio.topics.regsubalive=REG_SUB_ALIVE +scorpio.topics.regsubsync=REG_SUB_SYNC +scorpio.startupdelay=5s +scorpio.registry.subscription.checkinterval=30s +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query 
performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource 
(timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + 
NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION 
temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER 
TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = 
COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
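+ -- illustrative example (assumed input): an expanded entities element such as
+ -- {"@id": "urn:ngsi-ld:Vehicle:A100", "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"]}
+ -- stores entity_id and entity_type and leaves entity_idpattern NULL, while an element carrying only an
+ -- idPattern stores the pattern and leaves entity_id NULL.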
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
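+ -- illustrative note: #>> returns the value at the given path as text, so data#>>'{@type,0}' is the first expanded type URI,
+ -- e.g. https://uri.etsi.org/ngsi-ld/default-context/Vehicle for an entity declared with type Vehicle under the default context.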
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only 
= (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = 
NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20211217.1__subscription_table.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql 
b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')) into scopes; -- array_append takes (array, element); the element is extracted as text for the text[] result + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed?
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal 
= COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql 
b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + 
e_id_p text, + e_type text, + e_prop text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = 
(NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
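+	-- descriptive note (added for clarity, not in the original migration): at this point tmp holds every instance of
+	-- attribName whose datasetId does not match deleteDatasetId; if any instances remain they replace the attribute's
+	-- array in the entity, otherwise the attribute key is removed from the entity altogether.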
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 
SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET 
E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ 
b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + 
"coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": 
"@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + 
operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ 
b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? 
key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": 
"ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": 
"ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ 
b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20230904.1__fixsubs.sql @@ -0,0 +1 @@ +UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = 
resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', entityId, 'old', prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + 
"LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + 
"@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": 
"ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": 
"ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": 
"ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + 
"@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM 
jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + -- reset the per-entity flag so a result from the previous iteration does not carry over + not_overwriting := false; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ?
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE 
'%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": 
"ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", + + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + 
"@id": "ngsi-ld:hasLanguageMaps", + + "@container": "@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": 
"ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + + "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? 
'@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ?
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/controller/RegistrySubscriptionController.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/controller/RegistrySubscriptionController.class new file mode 100644 index 0000000000000000000000000000000000000000..be924a7ae26eaafde3fdf1a184f1fca6432eaf54 Binary files /dev/null and
b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/controller/RegistrySubscriptionController.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingBase.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingBase.class new file mode 100644 index 0000000000000000000000000000000000000000..af06e8a570de7ae82baee7db260afe4995210db3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingBase.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingByteArray.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..51f505d79be582c88e5699ebc9f3470007c8e383 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingByteArray.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingInMemory.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingInMemory.class new file mode 100644 index 0000000000000000000000000000000000000000..0684573ee77232c7bbc61d93bbf5509d8c722a92 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingInMemory.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingString.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingString.class new file mode 100644 index 0000000000000000000000000000000000000000..3bd1103ab03d5f266e62aa6c5c45a21339ad3887 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingString.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncSQS.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncSQS.class new file mode 100644 index 0000000000000000000000000000000000000000..a5df1d49a4b324e537cae787de59c0808279649b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncSQS.class differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceBase.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceBase.class new file mode 100644 index 0000000000000000000000000000000000000000..a740425efb1b50c5083d727b5e0c12af451eea6b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceBase.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceByteArray.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..3e76d8dbaac9d30d071e164401a5adb69942907b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceByteArray.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceString.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceString.class new file mode 100644 index 0000000000000000000000000000000000000000..e4f51b5d204a3158031a030d3fe034819a3c6a6c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceString.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/SyncService.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/SyncService.class new file mode 100644 index 0000000000000000000000000000000000000000..6ab0d8b728ec724949110389243157ef0fbd6e11 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/SyncService.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/repository/RegistrySubscriptionInfoDAO.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/repository/RegistrySubscriptionInfoDAO.class new file mode 100644 index 0000000000000000000000000000000000000000..4a2b7ee479646e5c5dec222da4cc1ee4e5c4b149 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/repository/RegistrySubscriptionInfoDAO.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/service/RegistrySubscriptionService.class b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/service/RegistrySubscriptionService.class new file mode 100644 index 
0000000000000000000000000000000000000000..558fb63477edfa4aac32268022c832520d4f5bcd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/classes/eu/neclab/ngsildbroker/registry/subscriptionmanager/service/RegistrySubscriptionService.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/maven-archiver/pom.properties b/scorpio-broker/RegistrySubscriptionManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..c1dfb83b34545436b9a525efa3ce2fdbff8aa84b --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:28:01 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=registry-subscription-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..ff9ac96a26c1dbeb9451abc5858d25659e32eadc --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,12 @@ +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceString.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/repository/RegistrySubscriptionInfoDAO.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingInMemory.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingString.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncSQS.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingBase.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceBase.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceByteArray.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingByteArray.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/service/RegistrySubscriptionService.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/controller/RegistrySubscriptionController.class +eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/SyncService.class diff --git a/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..1962792e232a9740c09374c4f54d5129fd19a616 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,12 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/controller/RegistrySubscriptionController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingBase.java 
+/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingInMemory.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionMessagingString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncSQS.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/RegistrySubscriptionSyncServiceString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/messaging/SyncService.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/repository/RegistrySubscriptionInfoDAO.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/main/java/eu/neclab/ngsildbroker/registry/subscriptionmanager/service/RegistrySubscriptionService.java diff --git a/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..0f90d4f47903b42095b8c664e148b3156bb73292 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,2 @@ +eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class +eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class diff --git a/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..28802af58db49f255927a2fe396097ba72e6c2ae --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,3 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.java 
+/root/scorpio/ScorpioBroker2/ScorpioBroker/RegistrySubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/service/RegistrySubscriptionServiceTest.java diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/app/registry-subscription-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/app/registry-subscription-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..b37ecd0c47db712b08650ba76a966a3d181c7196 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/app/registry-subscription-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file 
mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar 
new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar 
new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf 
Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary 
files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file 
mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file 
mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 
0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git 
a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar 
b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 
+com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 
+io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 
+io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 
+org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2a54d07dcd1a4772b0838ce90223fbb9feba32e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..2098d0902064eafa89eeef27c48e0b9e9381370e Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..414ff5ed963824f5a73ae9f5c7f5e5888722251c Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..ea54338c516894520254aa0113378a82dbd501fc Binary files 
/dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/quarkus-artifact.properties b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..398cbe94030d620385a833d477929c6bac229400 --- /dev/null +++ b/scorpio-broker/RegistrySubscriptionManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/registry-subscription-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/RegistrySubscriptionManager/target/registry-subscription-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/RegistrySubscriptionManager/target/registry-subscription-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..b0a9886929fd23b3a0928458fab107a0069704bf Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/registry-subscription-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class b/scorpio-broker/RegistrySubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..2a7b59a0de2253277b777dbaac91bc8f8db6487a Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class differ diff --git a/scorpio-broker/RegistrySubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class b/scorpio-broker/RegistrySubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..9339060b8ae4671f241ca6ffc067def99a3ed73f Binary files /dev/null and b/scorpio-broker/RegistrySubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class differ diff --git a/scorpio-broker/ScorpioBroker-Entity.pdf b/scorpio-broker/ScorpioBroker-Entity.pdf new file mode 100644 index 0000000000000000000000000000000000000000..18f498bad30f4ead617ea0ae24908513485f2942 --- /dev/null +++ b/scorpio-broker/ScorpioBroker-Entity.pdf @@ -0,0 +1,1057 @@ +%PDF-1.3 +%ÿÿÿÿ +1 0 obj +<< /Creator +/Producer +>> +endobj +2 0 obj +<< /Type /Catalog +/Pages 3 0 R +>> +endobj +3 0 obj +<< /Type /Pages +/Count 4 +/Kids [5 0 R 10 0 R 12 0 R 14 0 R] +>> +endobj +4 0 obj +<< /Length 8706 +>> +stream +q + +BT +167.472 723.804 Td +/F2.0 12 Tf +[<53636f72> 10 <70696f4272> 18 <6f6b> 10 <657220456e7469747920436f6e74726962> 20 <75746f72204c6963656e736520416772> 18 <65656d656e74>] TJ +ET + + +BT +60.0 699.968 Td +/F1.0 12 Tf +[<5468616e6b20796f7520666f7220796f757220696e74657265737420696e20636f6e74726962> 20 <7574696e6720746f2053636f7270696f42726f6b> 10 <657220282257> 80 <6522206f722022557322292e>] TJ +ET + + +BT +60.0 676.576 Td +/F1.0 12 Tf +[<5468697320636f6e74726962> 20 <75746f722061677265656d656e7420282241677265656d656e74222920646f63756d656e74732074686520726967687473206772616e74656420627920636f6e74726962> 20 <75746f727320746f2055732e2054> 80 <6f>] TJ +ET + + +BT +60.0 663.184 Td 
+/F1.0 12 Tf +[<6d616b> 10 <65207468697320646f63756d656e74206566> 25 <6665637469> 25 <76> 15 <652c20706c65617365207369676e20697420616e642073656e6420697420746f20557320627920656d61696c2c20666f6c6c6f> 25 <77696e672074686520696e737472756374696f6e73206174>] TJ +ET + + +BT +60.0 649.792 Td +/F1.0 12 Tf +[<68747470733a2f2f676974687562> 40 <2e636f6d2f53636f7270696f42726f6b> 10 <65722f53636f7270696f42726f6b> 10 <65722f626c6f622f6465> 25 <76> 15 <656c6f706d656e742f434f4e54524942> 10 <5554494e472e6d642e20546869732069732061>] TJ +ET + + +BT +60.0 636.4 Td +/F1.0 12 Tf +[<6c65> 15 <67> 5 <616c6c792062696e64696e6720646f63756d656e742c20736f20706c656173652072656164206974206361726566756c6c79206265666f7265206167726565696e6720746f2069742e205468652041677265656d656e74206d617920636f> 15 <76> 15 <6572>] TJ +ET + + +BT +60.0 623.008 Td +/F1.0 12 Tf +[<6d6f7265207468616e206f6e6520736f667477> 10 <6172652070726f6a656374206d616e616765642062792055732e>] TJ +ET + + +BT +60.0 599.616 Td +/F2.0 12 Tf +[<312e20446566> 25 <696e6974696f6e73>] TJ +ET + + +BT +80.0 575.78 Td +/F1.0 12 Tf +[<2259> 110 <6f7522206d65616e7320616e> 15 <79204c65> 15 <67> 5 <616c20456e74697479206f6e20626568616c66206f662077686f6d206120436f6e74726962> 20 <7574696f6e20686173206265656e207265636569> 25 <76> 15 <65642062792055732e20224c65> 15 <67> 5 <616c>] TJ +ET + + +BT +80.0 562.388 Td +/F1.0 12 Tf +[<456e7469747922206d65616e7320616e20656e74697479207768696368206973206e6f742061206e61747572616c20706572736f6e2e20224166> 25 <66> 20 <696c696174657322206d65616e73206f74686572204c65> 15 <67> 5 <616c20456e7469746965732074686174>] TJ +ET + + +BT +80.0 548.996 Td +/F1.0 12 Tf +[<636f6e74726f6c2c2061726520636f6e74726f6c6c6564206279> 65 <2c206f7220756e64657220636f6d6d6f6e20636f6e74726f6c20776974682074686174204c65> 15 <67> 5 <616c20456e74697479> 65 <2e2046> 15 <6f722074686520707572706f736573206f66>] TJ +ET + + +BT +80.0 535.604 Td +/F1.0 12 Tf +[<7468697320646566> 20 <696e6974696f6e2c2022636f6e74726f6c22206d65616e73202869292074686520706f> 25 <776572> 40 <2c20646972656374206f7220696e6469726563742c20746f2063617573652074686520646972656374696f6e206f72>] TJ +ET + + +BT +80.0 522.212 Td +/F1.0 12 Tf +[<6d616e6167656d656e74206f662073756368204c65> 15 <67> 5 <616c20456e74697479> 65 <2c207768657468657220627920636f6e7472616374206f72206f74686572776973652c2028696929206f> 25 <776e657273686970206f662066> 20 <69667479>] TJ +ET + + +BT +80.0 508.82 Td +/F1.0 12 Tf +[<70657263656e74202835302529206f72206d6f7265206f6620746865206f75747374616e64696e6720736861726573206f7220736563757269746965732077686963682076> 20 <6f746520746f20656c65637420746865206d616e6167656d656e74>] TJ +ET + + +BT +80.0 495.428 Td +/F1.0 12 Tf +[<6f72206f7468657220706572736f6e732077686f206469726563742073756368204c65> 15 <67> 5 <616c20456e74697479206f722028696969292062656e6566> 20 <696369616c206f> 25 <776e657273686970206f66207375636820656e74697479> 65 <2e>] TJ +ET + + +BT +80.0 472.036 Td +/F1.0 12 Tf +[<22436f6e74726962> 20 <7574696f6e22206d65616e7320616e> 15 <792077> 10 <6f726b206f6620617574686f72736869702074686174206973205375626d69747465642062792059> 110 <6f7520746f20557320696e2077686963682059> 110 <6f75206f> 25 <776e>] TJ +ET + + +BT +80.0 458.644 Td +/F1.0 12 Tf +[<6f7220617373657274206f> 25 <776e657273686970206f662074686520436f70> 10 <7972696768742e2049662059> 110 <6f7520646f206e6f74206f> 25 <776e2074686520436f70> 10 <79726967687420696e2074686520656e746972652077> 10 <6f726b206f66>] TJ +ET + + +BT +80.0 445.252 Td +/F1.0 12 Tf +[<617574686f72736869702c20706c6561736520666f6c6c6f> 25 
<772074686520696e737472756374696f6e7320696e202e>] TJ +ET + + +BT +80.0 421.86 Td +/F1.0 12 Tf +[<22436f70> 10 <79726967687422206d65616e7320616c6c207269676874732070726f74656374696e672077> 10 <6f726b73206f6620617574686f7273686970206f> 25 <776e6564206f7220636f6e74726f6c6c65642062792059> 110 <6f75206f722059> 110 <6f7572>] TJ +ET + + +BT +80.0 408.468 Td +/F1.0 12 Tf +[<4166> 25 <66> 20 <696c69617465732c20696e636c7564696e6720636f70> 10 <7972696768742c206d6f72616c20616e64206e65696768626f72696e67207269676874732c20617320617070726f7072696174652c20666f72207468652066756c6c207465726d206f66>] TJ +ET + + +BT +80.0 395.076 Td +/F1.0 12 Tf +[<74686569722065> 15 <78697374656e636520696e636c7564696e6720616e> 15 <792065> 15 <7874656e73696f6e732062792059> 110 <6f752e>] TJ +ET + + +BT +80.0 371.684 Td +/F1.0 12 Tf +[<224d6174657269616c22206d65616e73207468652077> 10 <6f726b206f6620617574686f7273686970207768696368206973206d6164652061> 20 <76> 25 <61696c61626c6520627920557320746f20746869726420706172746965732e205768656e>] TJ +ET + + +BT +80.0 358.292 Td +/F1.0 12 Tf +[<746869732041677265656d656e7420636f> 15 <76> 15 <657273206d6f7265207468616e206f6e6520736f667477> 10 <6172652070726f6a6563742c20746865204d6174657269616c206d65616e73207468652077> 10 <6f726b206f6620617574686f7273686970>] TJ +ET + + +BT +80.0 344.9 Td +/F1.0 12 Tf +[<746f2077686963682074686520436f6e74726962> 20 <7574696f6e2077> 10 <6173205375626d69747465642e2041667465722059> 110 <6f75205375626d69742074686520436f6e74726962> 20 <7574696f6e2c206974206d617920626520696e636c75646564>] TJ +ET + + +BT +80.0 331.508 Td +/F1.0 12 Tf +[<696e20746865204d6174657269616c2e>] TJ +ET + + +BT +80.0 308.116 Td +/F1.0 12 Tf +[<225375626d69742220206d65616e7320616e> 15 <7920666f726d206f6620656c656374726f6e69632c2076> 15 <657262616c2c206f72207772697474656e20636f6d6d756e69636174696f6e2073656e7420746f205573206f72206f7572>] TJ +ET + + +BT +80.0 294.724 Td +/F1.0 12 Tf +[<726570726573656e74617469> 25 <76> 15 <65732c20696e636c7564696e672062> 20 <7574206e6f74206c696d6974656420746f20656c656374726f6e6963206d61696c696e67206c697374732c20736f7572636520636f646520636f6e74726f6c2073797374656d732c>] TJ +ET + + +BT +80.0 281.332 Td +/F1.0 12 Tf +[<616e6420697373756520747261636b696e672073797374656d73207468617420617265206d616e61676564206279> 65 <2c206f72206f6e20626568616c66206f662c20557320666f722074686520707572706f7365206f662064697363757373696e67>] TJ +ET + + +BT +80.0 267.94 Td +/F1.0 12 Tf +[<616e6420696d70726f> 15 <76696e6720746865204d6174657269616c2c2062> 20 <75742065> 15 <78636c7564696e6720636f6d6d756e69636174696f6e207468617420697320636f6e73706963756f75736c79206d61726b> 10 <6564206f72>] TJ +ET + + +BT +80.0 254.548 Td +/F1.0 12 Tf +[<6f74686572776973652064657369676e6174656420696e2077726974696e672062792059> 110 <6f7520617320224e6f74206120436f6e74726962> 20 <7574696f6e2e22>] TJ +ET + + +BT +80.0 231.156 Td +/F1.0 12 Tf +[<225375626d697373696f6e204461746522206d65616e73207468652064617465206f6e2077686963682059> 110 <6f75205375626d6974206120436f6e74726962> 20 <7574696f6e20746f2055732e>] TJ +ET + + +BT +80.0 207.764 Td +/F1.0 12 Tf +[<224566> 25 <6665637469> 25 <76> 15 <65204461746522206d65616e732074686520646174652059> 110 <6f752065> 15 <78> 15 <656375746520746869732041677265656d656e74206f722074686520646174652059> 110 <6f752066> 20 <69727374205375626d69742061>] TJ +ET + + +BT +80.0 194.372 Td +/F1.0 12 Tf +[<436f6e74726962> 20 <7574696f6e20746f2055732c20776869636865> 25 <76> 15 <6572206973206561726c696572> 55 <2e>] TJ +ET + + +BT +80.0 170.98 Td +/F1.0 12 Tf +[<224d6564696122206d65616e7320616e> 15 
<7920706f7274696f6e206f66206120436f6e74726962> 20 <7574696f6e207768696368206973206e6f7420736f667477> 10 <6172652e>] TJ +ET + + +BT +60.0 147.588 Td +/F2.0 12 Tf +[<322e204772616e74206f6620526967687473>] TJ +ET + + +BT +60.0 123.752 Td +/F1.0 12 Tf +[<322e3120436f70> 10 <797269676874204c6963656e7365>] TJ +ET + + +BT +60.0 100.36 Td +/F1.0 12 Tf +[<2861292059> 110 <6f752072657461696e206f> 25 <776e657273686970206f662074686520436f70> 10 <79726967687420696e2059> 110 <6f757220436f6e74726962> 20 <7574696f6e20616e64206861> 20 <76> 15 <65207468652073616d652072696768747320746f20757365206f72>] TJ +ET + + +BT +60.0 86.968 Td +/F1.0 12 Tf +[<6c6963656e73652074686520436f6e74726962> 20 <7574696f6e2077686963682059> 110 <6f752077> 10 <6f756c64206861> 20 <76> 15 <652068616420776974686f757420656e746572696e6720696e746f207468652041677265656d656e742e>] TJ +ET + + +BT +60.0 63.576 Td +/F1.0 12 Tf +[<2862292054> 80 <6f20746865206d6178696d756d2065> 15 <7874656e74207065726d6974746564206279207468652072656c65> 25 <76> 25 <616e74206c61> 15 <77> 65 <2c2059> 110 <6f75206772616e7420746f20557320612070657270657475616c2c2077> 10 <6f726c64776964652c>] TJ +ET + + +BT +533.336 44.536 Td +/F1.0 8 Tf +[<31206f662034>] TJ +ET + +/Stamp1 Do +Q + +endstream +endobj +5 0 obj +<< /Type /Page +/Parent 3 0 R +/MediaBox [0 0 612.0 792.0] +/Contents 4 0 R +/Resources << /ProcSet [/PDF /Text /ImageB /ImageC /ImageI] +/Font << /F2.0 8 0 R +/F1.0 7 0 R +>> +/XObject << /Stamp1 6 0 R +>> +>> +>> +endobj +6 0 obj +<< /Type /XObject +/Subtype /Form +/BBox [0 0 612.0 792.0] +/Resources << /Font << /F1.0 7 0 R +>> +>> +/Length 208 +>> +stream +q +/DeviceRGB cs +0.000 0.000 0.000 scn +/DeviceRGB CS +0.000 0.000 0.000 SCN +1 w +0 J +0 j +[] 0 d + +BT +60.0 50.0 Td +/F1.0 8 Tf +[<4861726d6f6e> 15 <79202848412d434c412d45292056> 111 <657273696f6e20312e30>] TJ +ET + +Q + +endstream +endobj +7 0 obj +<< /Type /Font +/Subtype /Type1 +/BaseFont /Times-Roman +/Encoding /WinAnsiEncoding +>> +endobj +8 0 obj +<< /Type /Font +/Subtype /Type1 +/BaseFont /Times-Bold +/Encoding /WinAnsiEncoding +>> +endobj +9 0 obj +<< /Length 8055 +>> +stream +q + +BT +60.0 723.804 Td +/F1.0 12 Tf +[<6e6f6e2d65> 15 <78636c757369> 25 <76> 15 <652c207472616e7366657261626c652c20726f> 10 <79616c74792d667265652c2069727265> 25 <76> 20 <6f6361626c65206c6963656e736520756e6465722074686520436f70> 10 <79726967687420636f> 15 <76> 15 <6572696e6720746865>] TJ +ET + + +BT +60.0 710.412 Td +/F1.0 12 Tf +[<436f6e74726962> 20 <7574696f6e2c20776974682074686520726967687420746f207375626c6963656e7365207375636820726967687473207468726f756768206d756c7469706c65207469657273206f66207375626c6963656e736565732c20746f>] TJ +ET + + +BT +60.0 697.02 Td +/F1.0 12 Tf +[<726570726f647563652c206d6f64696679> 65 <2c20646973706c6179> 65 <2c20706572666f726d20616e642064697374726962> 20 <7574652074686520436f6e74726962> 20 <7574696f6e2061732070617274206f6620746865204d6174657269616c3b2070726f> 15 <7669646564>] TJ +ET + + +BT +60.0 683.628 Td +/F1.0 12 Tf +[<746861742074686973206c6963656e736520697320636f6e646974696f6e65642075706f6e20636f6d706c69616e636520776974682053656374696f6e20322e332e>] TJ +ET + + +BT +60.0 660.236 Td +/F1.0 12 Tf +[<322e322050> 15 <6174656e74204c6963656e7365>] TJ +ET + + +BT +60.0 636.844 Td +/F1.0 12 Tf +[<46> 15 <6f7220706174656e7420636c61696d7320696e636c7564696e672c20776974686f7574206c696d69746174696f6e2c206d6574686f642c2070726f636573732c20616e642061707061726174757320636c61696d732077686963682059> 110 <6f75206f72>] TJ +ET + + +BT +60.0 623.452 Td +/F1.0 12 Tf +[<59> 110 <6f7572204166> 25 <66> 20 
<696c6961746573206f> 25 <776e2c20636f6e74726f6c206f72206861> 20 <76> 15 <652074686520726967687420746f206772616e742c206e6f> 25 <77206f7220696e20746865206675747572652c2059> 110 <6f75206772616e7420746f2055732061>] TJ +ET + + +BT +60.0 610.06 Td +/F1.0 12 Tf +[<70657270657475616c2c2077> 10 <6f726c64776964652c206e6f6e2d65> 15 <78636c757369> 25 <76> 15 <652c207472616e7366657261626c652c20726f> 10 <79616c74792d667265652c2069727265> 25 <76> 20 <6f6361626c6520706174656e74206c6963656e73652c207769746820746865>] TJ +ET + + +BT +60.0 596.668 Td +/F1.0 12 Tf +[<726967687420746f207375626c6963656e73652074686573652072696768747320746f206d756c7469706c65207469657273206f66207375626c6963656e736565732c20746f206d616b> 10 <652c206861> 20 <76> 15 <65206d6164652c207573652c2073656c6c2c206f66> 25 <666572>] TJ +ET + + +BT +60.0 583.276 Td +/F1.0 12 Tf +[<666f722073616c652c20696d706f727420616e64206f7468657277697365207472616e736665722074686520436f6e74726962> 20 <7574696f6e20616e642074686520436f6e74726962> 20 <7574696f6e20696e20636f6d62696e6174696f6e207769746820746865>] TJ +ET + + +BT +60.0 569.884 Td +/F1.0 12 Tf +[<4d6174657269616c2028616e6420706f7274696f6e73206f66207375636820636f6d62696e6174696f6e292e2054686973206c6963656e7365206973206772616e746564206f6e6c7920746f207468652065> 15 <7874656e74207468617420746865>] TJ +ET + + +BT +60.0 556.492 Td +/F1.0 12 Tf +[<65> 15 <78> 15 <657263697365206f6620746865206c6963656e7365642072696768747320696e6672696e676573207375636820706174656e7420636c61696d733b20616e642070726f> 15 <766964656420746861742074686973206c6963656e7365206973>] TJ +ET + + +BT +60.0 543.1 Td +/F1.0 12 Tf +[<636f6e646974696f6e65642075706f6e20636f6d706c69616e636520776974682053656374696f6e20322e332e>] TJ +ET + + +BT +60.0 519.708 Td +/F1.0 12 Tf +[<322e33204f7574626f756e64204c6963656e7365>] TJ +ET + + +BT +60.0 496.316 Td +/F1.0 12 Tf +[<4173206120636f6e646974696f6e206f6e20746865206772616e74206f662072696768747320696e2053656374696f6e7320322e3120616e6420322e322c2057> 80 <6520616772656520746f206c6963656e73652074686520436f6e74726962> 20 <7574696f6e206f6e6c79>] TJ +ET + + +BT +60.0 482.924 Td +/F1.0 12 Tf +[<756e64657220746865207465726d73206f6620746865206c6963656e7365206f72206c6963656e7365732077686963682057> 80 <6520617265207573696e67206f6e20746865205375626d697373696f6e204461746520666f7220746865204d6174657269616c>] TJ +ET + + +BT +60.0 469.532 Td +/F1.0 12 Tf +[<28696e636c7564696e6720616e> 15 <792072696768747320746f2061646f707420616e> 15 <79206675747572652076> 15 <657273696f6e206f662061206c6963656e7365206966207065726d6974746564292e>] TJ +ET + + +BT +60.0 446.14 Td +/F1.0 12 Tf +[<496e206164646974696f6e2c2057> 80 <65206d6179207573652074686520666f6c6c6f> 25 <77696e67206c6963656e73657320666f72204d6564696120696e2074686520436f6e74726962> 20 <7574696f6e3a20437265617469> 25 <76> 15 <6520436f6d6d6f6e73>] TJ +ET + + +BT +60.0 432.748 Td +/F1.0 12 Tf +[<5a65726f2076312e3020556e69> 25 <76> 15 <657273616c2028696e636c7564696e6720616e> 15 <7920726967687420746f2061646f707420616e> 15 <79206675747572652076> 15 <657273696f6e206f662061206c6963656e7365206966207065726d6974746564292e>] TJ +ET + + +BT +60.0 409.356 Td +/F1.0 12 Tf +[<322e34204d6f72616c205269676874732e204966206d6f72616c20726967687473206170706c7920746f2074686520436f6e74726962> 20 <7574696f6e2c20746f20746865206d6178696d756d2065> 15 <7874656e74207065726d6974746564206279206c61> 15 <77> 65 <2c>] TJ +ET + + +BT +60.0 395.964 Td +/F1.0 12 Tf +[<59> 110 <6f752077> 10 <6169> 25 <76> 15 <6520616e64206167726565206e6f7420746f206173736572742073756368206d6f72616c20726967687473206167> 5 
<61696e7374205573206f72206f757220737563636573736f727320696e20696e7465726573742c206f7220616e> 15 <79206f66>] TJ +ET + + +BT +60.0 382.572 Td +/F1.0 12 Tf +[<6f7572206c6963656e736565732c2065697468657220646972656374206f7220696e6469726563742e>] TJ +ET + + +BT +60.0 359.18 Td +/F1.0 12 Tf +[<322e35204f7572205269676874732e2059> 110 <6f752061636b6e6f> 25 <776c6564676520746861742057> 80 <6520617265206e6f74206f626c6967> 5 <6174656420746f207573652059> 110 <6f757220436f6e74726962> 20 <7574696f6e2061732070617274206f6620746865>] TJ +ET + + +BT +60.0 345.788 Td +/F1.0 12 Tf +[<4d6174657269616c20616e64206d61792064656369646520746f20696e636c75646520616e> 15 <7920436f6e74726962> 20 <7574696f6e2057> 80 <6520636f6e736964657220617070726f7072696174652e>] TJ +ET + + +BT +60.0 322.396 Td +/F1.0 12 Tf +[<322e3620526573657276> 25 <6174696f6e206f66205269676874732e20416e> 15 <7920726967687473206e6f742065> 15 <7870726573736c792061737369676e6564206f72206c6963656e73656420756e64657220746869732073656374696f6e20617265>] TJ +ET + + +BT +60.0 309.004 Td +/F1.0 12 Tf +[<65> 15 <7870726573736c7920726573657276> 15 <65642062792059> 110 <6f752e>] TJ +ET + + +BT +60.0 285.612 Td +/F2.0 12 Tf +[<332e20416772> 18 <65656d656e74>] TJ +ET + + +BT +60.0 261.776 Td +/F1.0 12 Tf +[<59> 110 <6f7520636f6e66> 20 <69726d20746861743a>] TJ +ET + + +BT +60.0 238.384 Td +/F1.0 12 Tf +[<2861292059> 110 <6f75206861> 20 <76> 15 <6520746865206c65> 15 <67> 5 <616c20617574686f7269747920746f20656e74657220696e746f20746869732041677265656d656e742e>] TJ +ET + + +BT +60.0 214.992 Td +/F1.0 12 Tf +[<2862292059> 110 <6f75206f722059> 110 <6f7572204166> 25 <66> 20 <696c6961746573206f> 25 <776e2074686520436f70> 10 <79726967687420616e6420706174656e7420636c61696d7320636f> 15 <76> 15 <6572696e672074686520436f6e74726962> 20 <7574696f6e20776869636820617265>] TJ +ET + + +BT +60.0 201.6 Td +/F1.0 12 Tf +[<726571756972656420746f206772616e74207468652072696768747320756e6465722053656374696f6e20322e>] TJ +ET + + +BT +60.0 178.208 Td +/F1.0 12 Tf +[<28632920546865206772616e74206f662072696768747320756e6465722053656374696f6e203220646f6573206e6f742076696f6c61746520616e> 15 <79206772616e74206f66207269676874732077686963682059> 110 <6f75206f722059> 110 <6f7572>] TJ +ET + + +BT +60.0 164.816 Td +/F1.0 12 Tf +[<4166> 25 <66> 20 <696c6961746573206861> 20 <76> 15 <65206d61646520746f20746869726420706172746965732e>] TJ +ET + + +BT +60.0 141.424 Td +/F1.0 12 Tf +[<2864292059> 110 <6f75206861> 20 <76> 15 <6520666f6c6c6f> 25 <7765642074686520696e737472756374696f6e7320696e202c2069662059> 110 <6f7520646f206e6f74206f> 25 <776e2074686520436f70> 10 <79726967687420696e2074686520656e746972652077> 10 <6f726b206f66>] TJ +ET + + +BT +60.0 128.032 Td +/F1.0 12 Tf +[<617574686f7273686970205375626d69747465642e>] TJ +ET + + +BT +60.0 104.64 Td +/F2.0 12 Tf +[<342e20446973636c61696d6572>] TJ +ET + + +BT +60.0 80.804 Td +/F1.0 12 Tf +[<45584345505420464f522054484520455850524553532057> 120 <415252414e5449455320494e2053454354494f4e20332c2054484520434f4e54524942> 10 <5554494f4e204953>] TJ +ET + + +BT +60.0 67.412 Td +/F1.0 12 Tf +[<5052> 40 <4f> 50 <564944454420224153204953222e204d4f52452050> 92 <4152> 60 <544943554c41524c> 100 <59> 129 <2c20414c4c2045585052455353204f5220494d504c4945442057> 120 <415252414e54494553>] TJ +ET + + +BT +533.336 44.536 Td +/F1.0 8 Tf +[<32206f662034>] TJ +ET + +/Stamp1 Do +Q + +endstream +endobj +10 0 obj +<< /Type /Page +/Parent 3 0 R +/MediaBox [0 0 612.0 792.0] +/Contents 9 0 R +/Resources << /ProcSet [/PDF /Text /ImageB /ImageC /ImageI] +/Font << /F1.0 7 0 R +/F2.0 8 0 R +>> 
+/XObject << /Stamp1 6 0 R +>> +>> +>> +endobj +11 0 obj +<< /Length 7840 +>> +stream +q + +BT +60.0 723.804 Td +/F1.0 12 Tf +[<494e434c5544494e472c20574954484f5554204c494d4954> 93 <41> 111 <54494f4e2c20414e5920494d504c4945442057> 120 <415252414e5459204f46204d45524348414e54> 93 <4142494c495459> 129 <2c>] TJ +ET + + +BT +60.0 710.412 Td +/F1.0 12 Tf +[<4649544e45535320464f5220412050> 92 <4152> 60 <544943554c415220505552504f534520414e44204e4f4e2d494e4652494e47454d454e542041524520455850524553534c> 100 <59>] TJ +ET + + +BT +60.0 697.02 Td +/F1.0 12 Tf +[<444953434c41494d45442042592059> 30 <4f552054> 18 <4f20555320414e442042592055532054> 18 <4f2059> 30 <4f552e2054> 18 <4f2054484520455854454e5420544841> 111 <5420414e592053554348>] TJ +ET + + +BT +60.0 683.628 Td +/F1.0 12 Tf +[<57> 120 <415252414e544945532043414e4e4f> 40 <5420424520444953434c41494d45442c20535543482057> 120 <415252414e5459204953204c494d4954454420494e2044555241> 111 <54494f4e>] TJ +ET + + +BT +60.0 670.236 Td +/F1.0 12 Tf +[<54> 18 <4f20544845204d494e494d554d20504552494f44205045524d4954544544204259204c41> 90 <57> 92 <2e>] TJ +ET + + +BT +60.0 646.844 Td +/F2.0 12 Tf +[<352e20436f6e73657175656e7469616c2044616d6167652057> 65 <6169> 10 <76> 10 <6572>] TJ +ET + + +BT +60.0 623.008 Td +/F1.0 12 Tf +[<54> 18 <4f20544845204d4158494d554d20455854454e54205045524d4954544544204259204150504c494341424c45204c41> 90 <57> 92 <2c20494e204e4f204556454e542057494c4c>] TJ +ET + + +BT +60.0 609.616 Td +/F1.0 12 Tf +[<59> 30 <4f55204f52205553204245204c4941424c4520464f5220414e59204c4f5353204f46205052> 40 <4f464954532c204c4f5353204f4620414e5449434950> 92 <41> 111 <544544205341> 135 <56494e47532c>] TJ +ET + + +BT +60.0 596.224 Td +/F1.0 12 Tf +[<4c4f5353204f462044> 40 <41> 111 <54> 93 <412c20494e444952454354> 74 <2c205350454349414c2c20494e434944454e54> 93 <414c2c20434f4e534551> 10 <55454e5449414c20414e44204558454d504c4152> 65 <59>] TJ +ET + + +BT +60.0 582.832 Td +/F1.0 12 Tf +[<44> 40 <414d41> 40 <4745532041524953494e47204f5554204f4620544849532041> 40 <475245454d454e54205245474152444c455353204f4620544845204c4547414c204f52>] TJ +ET + + +BT +60.0 569.44 Td +/F1.0 12 Tf +[<4551> 10 <554954> 93 <41424c45205448454f52> 65 <592028434f4e545241> 40 <4354> 74 <2c2054> 18 <4f52> 60 <54204f52204f> 40 <54484552> 55 <57495345292055504f4e2057484943482054484520434c41494d204953>] TJ +ET + + +BT +60.0 556.048 Td +/F1.0 12 Tf +[<42> 35 <415345442e>] TJ +ET + + +BT +60.0 532.656 Td +/F2.0 12 Tf +[<362e204d697363656c6c616e656f7573>] TJ +ET + + +BT +60.0 508.82 Td +/F1.0 12 Tf +[<362e3120546869732041677265656d656e742077696c6c20626520676f> 15 <76> 15 <65726e656420627920616e6420636f6e73747275656420696e206163636f7264616e6365207769746820746865206c61> 15 <7773206f66202065> 15 <78636c7564696e6720697473>] TJ +ET + + +BT +60.0 495.428 Td +/F1.0 12 Tf +[<636f6e666c69637473206f66206c61> 15 <772070726f> 15 <766973696f6e732e20556e646572206365727461696e2063697263756d7374616e6365732c2074686520676f> 15 <76> 15 <65726e696e67206c61> 15 <7720696e20746869732073656374696f6e206d69676874206265>] TJ +ET + + +BT +60.0 482.036 Td +/F1.0 12 Tf +[<737570657273656465642062792074686520556e69746564204e6174696f6e7320436f6e> 40 <76> 15 <656e74696f6e206f6e20436f6e74726163747320666f722074686520496e7465726e6174696f6e616c2053616c65206f6620476f6f6473202822554e>] TJ +ET + + +BT +60.0 468.644 Td +/F1.0 12 Tf +[<436f6e> 40 <76> 15 <656e74696f6e222920616e6420746865207061727469657320696e74656e6420746f2061> 20 <76> 20 <6f696420746865206170706c69636174696f6e206f662074686520554e20436f6e> 40 <76> 15 
[Remaining pages of the preceding CLA PDF omitted: Sections 6.2 through 6.5 (entire agreement, assignment to third parties, waiver of performance, severability), signature blocks for "You" and "Us" with Name, Title, and Address lines, page footers, and the closing xref table and trailer.]
diff --git a/scorpio-broker/ScorpioBroker-Individual.pdf b/scorpio-broker/ScorpioBroker-Individual.pdf new file mode 100644 index 0000000000000000000000000000000000000000..c3b027446301de3bc7c437cab8af65e12ef43176 --- /dev/null +++ b/scorpio-broker/ScorpioBroker-Individual.pdf @@ -0,0 +1,990 @@
[990-line uncompressed PDF body omitted: the three-page ScorpioBroker Individual Contributor License Agreement (Harmony HA-CLA-I Version 1.0), covering 1. Definitions; 2. Grant of Rights (copyright license, patent license, outbound license, moral rights, our rights, reservation of rights); 3. Agreement; 4. Disclaimer; 5. Consequential Damage Waiver; 6. Miscellaneous; and signature blocks for "You" and "Us"; plus font objects, page objects, the xref table, and the trailer.]
diff --git a/scorpio-broker/SnsFanoutMessaging/target/classes/META-INF/jandex.idx b/scorpio-broker/SnsFanoutMessaging/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..3d85fd7a608d41e312487022151a645e6bfac915 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/classes/META-INF/jandex.idx differ
diff --git a/scorpio-broker/SnsFanoutMessaging/target/classes/META-INF/services/org/apache/camel/component/sns-fanout b/scorpio-broker/SnsFanoutMessaging/target/classes/META-INF/services/org/apache/camel/component/sns-fanout new file mode 100644 index 0000000000000000000000000000000000000000..ae64230d6a2d9cd22e1176813e6597a13aaadc4a --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/classes/META-INF/services/org/apache/camel/component/sns-fanout @@ -0,0 +1 @@ +class=eu.neclab.ngsildbroker.messaging.sns.SnsSqsFanoutComponent \ No newline at end of file
diff --git a/scorpio-broker/SnsFanoutMessaging/target/classes/application.properties b/scorpio-broker/SnsFanoutMessaging/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..ad3d4798a3d209bb0dddf0105e9ff5bdeba4d601 --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/classes/application.properties @@ -0,0 +1 @@ +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils \ No newline at end of file
diff --git a/scorpio-broker/SnsFanoutMessaging/target/classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponent.class b/scorpio-broker/SnsFanoutMessaging/target/classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponent.class new file mode 100644 index 0000000000000000000000000000000000000000..1b36940582e82cb723c67706fb612ce9bd001e47 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponent.class differ
diff --git a/scorpio-broker/SnsFanoutMessaging/target/classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpoint.class b/scorpio-broker/SnsFanoutMessaging/target/classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpoint.class new file mode 100644 index 0000000000000000000000000000000000000000..18ae289cc11dd144b79a261a7a60f489a3a2c57f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpoint.class differ
diff --git a/scorpio-broker/SnsFanoutMessaging/target/maven-archiver/pom.properties b/scorpio-broker/SnsFanoutMessaging/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..411f364320801c6755a36f56d66d2355b76e5dde --- /dev/null +++
b/scorpio-broker/SnsFanoutMessaging/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:28:32 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=sns-fanout-messaging +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/SnsFanoutMessaging/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/SnsFanoutMessaging/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..89249d68fe9a80e387211aa8ca20610ca08bb056 --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,2 @@ +eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpointTest.class +eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponentTest.class diff --git a/scorpio-broker/SnsFanoutMessaging/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/SnsFanoutMessaging/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..20253f1ff24df030a095cc24f08e73b044cf0dfd --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,2 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/SnsFanoutMessaging/src/test/java/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponentTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SnsFanoutMessaging/src/test/java/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpointTest.java diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/app/sns-fanout-messaging-5.0.5-SNAPSHOT.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/app/sns-fanout-messaging-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..b5f33ed1a494beed061402550bc2573335074822 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/app/sns-fanout-messaging-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files 
/dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 
index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-apache-httpclient-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-apache-httpclient-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..60d9d0db74f42ea7712e72e55617c25e56b3cd2e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-apache-httpclient-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 
Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jaxp-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jaxp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..56405e092a4976ea1426c255310cea941326d89e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jaxp-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file 
mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files 
/dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 
0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git 
a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-aws2-sns-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-aws2-sns-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07295629cced9e2fe611c66a2ee1e1ba08cc3391 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-aws2-sns-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-aws2-sqs-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-aws2-sqs-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..274053f5529634730f315c66b2a46819b6c4ac03 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-aws2-sqs-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-health-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-health-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..acb1a38fcf308aafd8cfdcf18f02272449c930ea Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-health-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-aws2-sns-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-aws2-sns-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c35adeb104811a23004683a235c6cf9cb1c93645 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-aws2-sns-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-aws2-sqs-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-aws2-sqs-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c6ac8874024b3b4c77474741818a139b33d690d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-aws2-sqs-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-support-aws2-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-support-aws2-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a74ab201f7cd237792b2e1f77909c3417b348912 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-support-aws2-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-support-httpclient-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-support-httpclient-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b78483edb73697a895cc9864bc5f04e338aba10b Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-support-httpclient-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file 
mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.logging.log4j.log4j-api-2.23.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.logging.log4j.log4j-api-2.23.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e8e3f5e5928c3dc9d8d8d06e4f7aacfb5090ed4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.apache.logging.log4j.log4j-api-2.23.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a 
Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logmanager.log4j-jboss-logmanager-1.3.1.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logmanager.log4j-jboss-logmanager-1.3.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9309ace3bda07e45086bd9f4f84ed85faba795a9 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logmanager.log4j-jboss-logmanager-1.3.1.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logmanager.log4j2-jboss-logmanager-1.1.2.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logmanager.log4j2-jboss-logmanager-1.1.2.Final.jar new file mode 100644 index 
0000000000000000000000000000000000000000..7bcec0c6b40ce4323166f423374a4e75bda4b8d9 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.logmanager.log4j2-jboss-logmanager-1.1.2.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.annotations-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.annotations-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4f0b762a8d4009c812d5bfdf967a2cfbccf34a53 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.annotations-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.apache-client-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.apache-client-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4b7a7f2460822889914154bcd2d1f3af8790797c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.apache-client-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.auth-2.27.0.jar 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.auth-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0cd78f178532d3642e3f802d4d88278b8f3c4dd8 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.auth-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-core-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-core-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b00267eab9c0642584c9564f3cd6063324fb3b17 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-core-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-json-protocol-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-json-protocol-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f32b431d11c3d9ae386b14a50500dcf563ac90de Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-json-protocol-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-query-protocol-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-query-protocol-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..dfb368291966b6734e459a203252edc71ae39aae Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.aws-query-protocol-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.checksums-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.checksums-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a4f649696ac5ee8389346df99e4cf1634a6dec4 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.checksums-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.checksums-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.checksums-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..6c1808ed2316978532d3f173fb36ee35abc3ce1b Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.checksums-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.endpoints-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.endpoints-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b5670b916cf57d9ae1a377021f177492032250e Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.endpoints-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-2.27.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..2c48743cdd8da4d84f178504da07310c2809441a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-aws-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-aws-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ff3f48b6fd69db521057f6f2e7bd2f1e40a8526 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-aws-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-aws-eventstream-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-aws-eventstream-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..aa1bd1f6c7088c2d7dc7980247d013d67bec5d80 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-aws-eventstream-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..914e8d70ad616c46f4a0d7006410c83d65968501 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-auth-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-client-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-client-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb84bdefad0954aeebe2cafd7e5deb98fa6f0b5a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.http-client-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.iam-policy-builder-2.25.44.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.iam-policy-builder-2.25.44.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba053cfe0cdc423ee6873007d8484ec85af239fa Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.iam-policy-builder-2.25.44.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.identity-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.identity-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..568bbce294c34bc0b43dd63b3c54f1425665e5da Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.identity-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.json-utils-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.json-utils-2.27.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..0e88fb9d098d407a8826e45db3ea3a7960849d21 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.json-utils-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.metrics-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.metrics-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..999d248286959739a2862fc3214760436be4e433 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.metrics-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.netty-nio-client-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.netty-nio-client-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..33c8f50bf6562bb883a2d800b3d16e5c3e74271d Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.netty-nio-client-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.profiles-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.profiles-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..728cbb07d29758b3d72882650a20c50509df0d35 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.profiles-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.protocol-core-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.protocol-core-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..102b33ee9a210857522f6fa5a2ec16775bc7a930 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.protocol-core-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.regions-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.regions-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49a4669a7a9a3fd55cf450ec150b665687ea9347 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.regions-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.retries-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.retries-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c48c8c4311bfab4b7e1b9a666b080b661745c01c Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.retries-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.retries-spi-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.retries-spi-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3db22575b107f7b4333378ff2fac13bde1262f1 Binary files /dev/null and 
b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.retries-spi-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sdk-core-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sdk-core-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..69bee38b47ce3d987e19d9aeea2a4c0315c3b103 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sdk-core-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sns-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sns-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e5f3acd0c873a41513cdcdeea2c07b66bf90ee63 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sns-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sqs-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sqs-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11b47c2d395750f43f8871ca5aa9848f8ea8c556 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.sqs-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.third-party-jackson-core-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.third-party-jackson-core-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ddb6030ed1783793e92908993882ac917f15f262 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.third-party-jackson-core-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.utils-2.27.0.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.utils-2.27.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6dab086b1ca4d32707476e42ef0e123b9a984eb Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.awssdk.utils-2.27.0.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.eventstream.eventstream-1.0.1.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.eventstream.eventstream-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6397d9395d6d9c4d21f998ebd6584d58a8459b61 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/lib/main/software.amazon.eventstream.eventstream-1.0.1.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..88fcc2354c9eba4849655c4f320461ba36ad2ac1 --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,292 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 
+com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-apache-httpclient::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 
+io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jaxp::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 
+io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-aws2-sns::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-aws2-sqs::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-support-aws2::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-support-httpclient::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-aws2-sns::jar:4.7.0 +org.apache.camel:camel-aws2-sqs::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 
+org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-health::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.apache.logging.log4j:log4j-api::jar:2.23.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.logmanager:log4j-jboss-logmanager::jar:1.3.1.Final +org.jboss.logmanager:log4j2-jboss-logmanager::jar:1.1.2.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.lz4:lz4-java::jar:1.8.0 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 +software.amazon.awssdk:annotations::jar:2.27.0 +software.amazon.awssdk:apache-client::jar:2.27.0 +software.amazon.awssdk:auth::jar:2.27.0 +software.amazon.awssdk:aws-core::jar:2.27.0 +software.amazon.awssdk:aws-json-protocol::jar:2.27.0 +software.amazon.awssdk:aws-query-protocol::jar:2.27.0 +software.amazon.awssdk:checksums-spi::jar:2.27.0 +software.amazon.awssdk:checksums::jar:2.27.0 +software.amazon.awssdk:endpoints-spi::jar:2.27.0 +software.amazon.awssdk:http-auth-aws-eventstream::jar:2.27.0 +software.amazon.awssdk:http-auth-aws::jar:2.27.0 +software.amazon.awssdk:http-auth-spi::jar:2.27.0 +software.amazon.awssdk:http-auth::jar:2.27.0 +software.amazon.awssdk:http-client-spi::jar:2.27.0 
+software.amazon.awssdk:iam-policy-builder::jar:2.25.44 +software.amazon.awssdk:identity-spi::jar:2.27.0 +software.amazon.awssdk:json-utils::jar:2.27.0 +software.amazon.awssdk:metrics-spi::jar:2.27.0 +software.amazon.awssdk:netty-nio-client::jar:2.27.0 +software.amazon.awssdk:profiles::jar:2.27.0 +software.amazon.awssdk:protocol-core::jar:2.27.0 +software.amazon.awssdk:regions::jar:2.27.0 +software.amazon.awssdk:retries-spi::jar:2.27.0 +software.amazon.awssdk:retries::jar:2.27.0 +software.amazon.awssdk:sdk-core::jar:2.27.0 +software.amazon.awssdk:sns::jar:2.27.0 +software.amazon.awssdk:sqs::jar:2.27.0 +software.amazon.awssdk:third-party-jackson-core::jar:2.27.0 +software.amazon.awssdk:utils::jar:2.27.0 +software.amazon.eventstream:eventstream::jar:1.0.1 diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus-run.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c3686477e7bb6ece40a2cbb9f7bed94fdb63dfa Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..a197185d868215bae7875db23213d97c9c0f65b6 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..a7a38560a83333e53789ab9613d4591a2b8b0d96 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9a895fdf88f765ece6d85dbf8139b38e3b33224 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/quarkus-artifact.properties b/scorpio-broker/SnsFanoutMessaging/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..15ece3cdf4e25bb1f48856d5f25ed5956837010f --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/sns-fanout-messaging\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/SnsFanoutMessaging/target/sns-fanout-messaging-5.0.5-SNAPSHOT.jar b/scorpio-broker/SnsFanoutMessaging/target/sns-fanout-messaging-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..6beb7fc7ed981c400fb2f396ce59928e7e3e7a71 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/sns-fanout-messaging-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/test-classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponentTest.class 
b/scorpio-broker/SnsFanoutMessaging/target/test-classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponentTest.class new file mode 100644 index 0000000000000000000000000000000000000000..d713bb4140622f8a4f46a2ce22ba1b93e6c0ae1a Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/test-classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutComponentTest.class differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/test-classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpointTest.class b/scorpio-broker/SnsFanoutMessaging/target/test-classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpointTest.class new file mode 100644 index 0000000000000000000000000000000000000000..84f602aa7d31e8275498fb4faad80f8907bcbb56 Binary files /dev/null and b/scorpio-broker/SnsFanoutMessaging/target/test-classes/eu/neclab/ngsildbroker/messaging/sns/SnsSqsFanoutEndpointTest.class differ diff --git a/scorpio-broker/SnsFanoutMessaging/target/test-classes/mockito-extensions/org.mockito.plugins.MockMaker b/scorpio-broker/SnsFanoutMessaging/target/test-classes/mockito-extensions/org.mockito.plugins.MockMaker new file mode 100644 index 0000000000000000000000000000000000000000..ca6ee9cea8ec189a088d50559325d4e84ff8ad09 --- /dev/null +++ b/scorpio-broker/SnsFanoutMessaging/target/test-classes/mockito-extensions/org.mockito.plugins.MockMaker @@ -0,0 +1 @@ +mock-maker-inline \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/.gitignore b/scorpio-broker/SpringCloudModules/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/SpringCloudModules/config-server/.gitignore b/scorpio-broker/SpringCloudModules/config-server/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..01a82dee7a1a150e129f4bc79186afab0080aa69 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/.gitignore @@ -0,0 +1,5 @@ +/target/ +/.settings/ +.classpath +.project + diff --git a/scorpio-broker/SpringCloudModules/config-server/ReadMe b/scorpio-broker/SpringCloudModules/config-server/ReadMe new file mode 100644 index 0000000000000000000000000000000000000000..c3d1176eea9e483a2edd83121e17b2be6e7cd135 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/ReadMe @@ -0,0 +1,7 @@ +Limitation: +If a yml file is changed, the corresponding micro-service must be restarted + +for example +if query-manager.yml is changed, the query manager micro-service needs to be restarted.
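A minimal, hypothetical Java sketch of why the restart is required, assuming the plain Spring @Value injection used elsewhere in this repository (for example ${query-manager.request-path} in the gateway's PreFilter): values served by the config-server are resolved once when the consuming micro-service starts, so a later change to the yml is not picked up until that service is restarted.

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Hypothetical illustration only, not part of the broker sources.
@Component
public class QueryManagerSettings {

	// Resolved from the config-server (query-manager.yml) when the Spring context starts;
	// editing the yml afterwards has no effect until this micro-service is restarted.
	@Value("${query-manager.request-path}")
	private String requestPath;

	public String getRequestPath() {
		return requestPath;
	}
}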
+ + \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/dockerfile4maven b/scorpio-broker/SpringCloudModules/config-server/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/SpringCloudModules/config-server/pom.xml b/scorpio-broker/SpringCloudModules/config-server/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..528087b17d410c60653b606c5a63f6d838e9f89e --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/pom.xml @@ -0,0 +1,71 @@ + + + 4.0.0 + + eu.neclab.ngsildbroker + springcloud + 1.0.0-SNAPSHOT + + config-server + config-server + 1.0.0-SNAPSHOT + http://maven.apache.org + + UTF-8 + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + org.springframework.cloud + spring-cloud-config-server + + + junit + junit + test + + + com.thoughtworks.xstream + xstream + 1.4.19 + + + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/java/eu/neclab/ngsildbroker/configserver/ConfigServerApplication.java b/scorpio-broker/SpringCloudModules/config-server/src/main/java/eu/neclab/ngsildbroker/configserver/ConfigServerApplication.java new file mode 100644 index 0000000000000000000000000000000000000000..2a54f324db897d0d83feafe03d502b1786b0cb51 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/java/eu/neclab/ngsildbroker/configserver/ConfigServerApplication.java @@ -0,0 +1,15 @@ +package eu.neclab.ngsildbroker.configserver; + +import org.springframework.boot.SpringApplication; +import org.springframework.cloud.config.server.EnableConfigServer; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +@EnableConfigServer +@SpringBootApplication + +public class ConfigServerApplication { + + public static void main(String[] args) { + SpringApplication.run(ConfigServerApplication.class, args); + } +} diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-aaio.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..1494704e461b4aa75cb59fad6a48283b6b70ea4e --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-aaio.yml @@ -0,0 +1,9 @@ +server: + port: 8888 +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-aio.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-aio.yml new file mode 100644 index 
0000000000000000000000000000000000000000..1494704e461b4aa75cb59fad6a48283b6b70ea4e --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-aio.yml @@ -0,0 +1,9 @@ +server: + port: 8888 +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-dist.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..fdb46ba24efcb2e3648aa74f22ea8d057ab7e3da --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application-dist.yml @@ -0,0 +1,9 @@ +server: + port: 8888 +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..60686a45e80f26eb94e035a6da5c1debaa26f292 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/application.yml @@ -0,0 +1,24 @@ +spring: + profiles: + active: native + application: + name: configserver + main: + lazy-initialization: true + cloud: + config: + server: + native: + searchLocations: classpath:/config +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + registerWithEureka: true + fetchRegistry: true +server: + port: 8888 +logging: + level: + root: ERROR +#spring.cloud.config.server.git.uri=https://github.com/husseinterek/spring-cloud-repo.git diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/c-sources.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/c-sources.yml new file mode 100644 index 0000000000000000000000000000000000000000..158dcc0a49b85da02d5fabc2c56caf69abee0597 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/c-sources.yml @@ -0,0 +1,57 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET + - api: /* + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /* + role: + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + PATCH + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: / + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: / + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/entity-manager.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/entity-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..081df638cbdacb705a84c53495fdc8b1063e0253 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/entity-manager.yml @@ -0,0 +1,52 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + 
userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /*/attrs + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /*/attrs/* + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/history-manager.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/history-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..3a2be36c6024778c8b220f673564ae4ce343ce25 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/history-manager.yml @@ -0,0 +1,68 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /entities + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /entities + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET + - api: /entities/* + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + method: + GET + - api: /entities/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /entities/*/attrs/ + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /entities/*/attrs/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /entities/*/attrs/*/* + role: + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + PATCH + - api: /entities/*/attrs/*/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/query-manager.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/query-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..485eab6e15cdeeb10ecfe8eb49464f856d57271a --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/query-manager.yml @@ -0,0 +1,21 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_G + - USER_PG + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + GET \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/subscription-manager.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/subscription-manager.yml new file mode 100644 index 0000000000000000000000000000000000000000..0ae20cc00530d47fc0c34d9347dd010dd5b0ad37 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/subscription-manager.yml @@ -0,0 +1,50 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources 
+secEnabled: false +auth: + - api: / + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: / + role: + - USER_G + - USER_PG + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + GET + - api: /* + role: + - USER_G + - USER_PG + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + GET + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /* + role: + - USER_GUD + - USER_PUG + - ADMIN_PUGD + method: + PATCH \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/testmanager.yml b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/testmanager.yml new file mode 100644 index 0000000000000000000000000000000000000000..081df638cbdacb705a84c53495fdc8b1063e0253 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/main/resources/config/testmanager.yml @@ -0,0 +1,52 @@ +management: + security: + enabled: false + +security: + sessions: NEVER + oauth2: + resource: + userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo +# Configure resources +secEnabled: false +auth: + - api: /* + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE + - api: /*/attrs + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs + role: + - USER_PG + - USER_PUG + - ADMIN_PUGD + method: + POST + - api: /*/attrs/* + role: + - USER_PUG + - USER_GUD + - ADMIN_PUGD + method: + PATCH + - api: /*/attrs/* + role: + - USER_GUD + - ADMIN_PUGD + method: + DELETE \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/config-server/src/test/java/poc/nec/configserver/AppTest.java b/scorpio-broker/SpringCloudModules/config-server/src/test/java/poc/nec/configserver/AppTest.java new file mode 100644 index 0000000000000000000000000000000000000000..3f44cfd229cb6e506d9f7d02c7d5ce2c37a0850b --- /dev/null +++ b/scorpio-broker/SpringCloudModules/config-server/src/test/java/poc/nec/configserver/AppTest.java @@ -0,0 +1,38 @@ +package poc.nec.configserver; + +import junit.framework.Test; +import junit.framework.TestCase; +import junit.framework.TestSuite; + +/** + * Unit test for simple App. 
+ */ +public class AppTest + extends TestCase +{ + /** + * Create the test case + * + * @param testName name of the test case + */ + public AppTest( String testName ) + { + super( testName ); + } + + /** + * @return the suite of tests being tested + */ + public static Test suite() + { + return new TestSuite( AppTest.class ); + } + + /** + * Rigourous Test :-) + */ + public void testApp() + { + assertTrue( true ); + } +} diff --git a/scorpio-broker/SpringCloudModules/eureka/.gitignore b/scorpio-broker/SpringCloudModules/eureka/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/SpringCloudModules/eureka/dockerfile4maven b/scorpio-broker/SpringCloudModules/eureka/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/SpringCloudModules/eureka/pom.xml b/scorpio-broker/SpringCloudModules/eureka/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..8bcec032abfd07b0d5c066e8418fa594b57bc107 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/pom.xml @@ -0,0 +1,92 @@ + + + 4.0.0 + + eureka-server + jar + + eureka-server + Base Eureka server for Scorpio + + + eu.neclab.ngsildbroker + SpringCloudParent + 1.0.0-SNAPSHOT + ../../SpringCloudParent + + 1.0.0-SNAPSHOT + + + java-above-8-support + + [9.0,) + + + + com.sun.xml.bind + jaxb-core + 2.3.0.1 + + + javax.xml.bind + jaxb-api + 2.3.1 + + + com.sun.xml.bind + jaxb-impl + 2.3.1 + + + com.thoughtworks.xstream + xstream + 1.4.19 + + + + + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-server + + + + + + + diff --git a/scorpio-broker/SpringCloudModules/eureka/src/main/java/eu/neclab/ngsildbroker/eurekaserver/EurekaServerApplication.java b/scorpio-broker/SpringCloudModules/eureka/src/main/java/eu/neclab/ngsildbroker/eurekaserver/EurekaServerApplication.java new file mode 100644 index 0000000000000000000000000000000000000000..186c37c921543d1d543cc649660e74fa25ef503b --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/src/main/java/eu/neclab/ngsildbroker/eurekaserver/EurekaServerApplication.java @@ -0,0 +1,14 @@ +package eu.neclab.ngsildbroker.eurekaserver; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.netflix.eureka.server.EnableEurekaServer; + +@SpringBootApplication +@EnableEurekaServer +public class EurekaServerApplication { + + public static void main(String[] args) { + SpringApplication.run(EurekaServerApplication.class, args); + } +} diff --git 
a/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-aaio.yml b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..c2e1750111b7f491af51274e182efd6050710250 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-aaio.yml @@ -0,0 +1,5 @@ +server: + port: 8761 +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-aio.yml b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..290a298e4dc8cd986786e6e331eda6bbbf72052b --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-aio.yml @@ -0,0 +1,5 @@ +server: + port: 8761 +logging: + level: + root: ERROR diff --git a/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-dist.yml b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..c2e1750111b7f491af51274e182efd6050710250 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application-dist.yml @@ -0,0 +1,5 @@ +server: + port: 8761 +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application.yml b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..e218b2315018151ed325485ddbc09b5868414267 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/src/main/resources/application.yml @@ -0,0 +1,12 @@ +spring: + application: + name: eureka-server +server: + port: 8761 +eureka: + client: + register-with-eureka: false + fetch-registry: false +logging: + level: + root: ERROR diff --git a/scorpio-broker/SpringCloudModules/eureka/src/test/java/poc/nec/eurekaserver/EurekaServerApplicationTests.java b/scorpio-broker/SpringCloudModules/eureka/src/test/java/poc/nec/eurekaserver/EurekaServerApplicationTests.java new file mode 100644 index 0000000000000000000000000000000000000000..77f9f536f10784763fdf6bc1864070dd6cc9f7a8 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/eureka/src/test/java/poc/nec/eurekaserver/EurekaServerApplicationTests.java @@ -0,0 +1,16 @@ +package poc.nec.eurekaserver; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + +@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +public class EurekaServerApplicationTests { + + @Test + public void contextLoads() { + } + +} diff --git a/scorpio-broker/SpringCloudModules/gateway/.gitignore b/scorpio-broker/SpringCloudModules/gateway/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/SpringCloudModules/gateway/ReadMe b/scorpio-broker/SpringCloudModules/gateway/ReadMe new file mode 100644 index 0000000000000000000000000000000000000000..75610e4616f0422407d6bef836b9d4f9ee08d055 --- /dev/null +++ 
b/scorpio-broker/SpringCloudModules/gateway/ReadMe @@ -0,0 +1,62 @@ +IDENTITY MANAGER-KEYROCK + +#PREREQUISITE +Node.js +Node packaged modules +MySQL + +#to install mysql +sudo apt-get update +sudo apt-get install mysql-server + +#to install nodejs +apt-get install curl python-software-properties -y +curl -sL https://deb.nodesource.com/setup_10.x | sudo -E bash - +sudo apt-get install nodejs -y + +#The following steps are needed to install and run Keyrock. +Clone the repository +git clone https://github.com/ging/fiware-idm +Install all required libraries using npm +cd fiware-idm +npm install +To configure Keyrock you can copy the file named config.js.template to config.js. +cp config.js.template config.js + +#You can edit it with the corresponding basic information. + +#Configure port and host: +config.host = "http://localhost:3000"; +config.port = 3000; + +#Configure database +config.database = { +host: "localhost", +password: "root", +username: "root", +database: "idm", + dialect: "mysql", + port:3306 +}; + +#To create the database +npm run-script create_db +npm run-script migrate_db +npm run-script seed_db + +Launch the server +npm start + +#open in browser +http://ip_address:3000/auth/login + +Enter email and password +default email and password: +email: admin@test.com +password: 1234 + +#Register the application in Keyrock +application name: +Description: +Url: http://ip_address:9090 +Callback Url: http://ip_address:9090/login diff --git a/scorpio-broker/SpringCloudModules/gateway/dockerfile4maven b/scorpio-broker/SpringCloudModules/gateway/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..236b96104ae6ff47b13cfaec56dd1a4479038f2f --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} --server.address=0.0.0.0 diff --git a/scorpio-broker/SpringCloudModules/gateway/pom.xml b/scorpio-broker/SpringCloudModules/gateway/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..caeae7cdd97d4cdb44b835b0f6d45a3fe2885d0f --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/pom.xml @@ -0,0 +1,133 @@ + + + 4.0.0 + + gateway + jar + + gateway + Demo project for Spring Boot + + + eu.neclab.ngsildbroker + SpringCloudParent + 1.0.0-SNAPSHOT + ../../SpringCloudParent + + 1.0.0-SNAPSHOT + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework.cloud + spring-cloud-starter-netflix-zuul + + + io.springfox + springfox-swagger2 + 2.6.1 + + + io.springfox + springfox-swagger-ui + 2.6.1 + + + org.springframework.cloud + spring-cloud-starter-oauth2 + + + org.springframework.cloud + spring-cloud-starter-security + + + com.thoughtworks.xstream + xstream + 1.4.19 + + + + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + docker + + + + com.spotify + 
dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/GatewayApplication.java b/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/GatewayApplication.java new file mode 100644 index 0000000000000000000000000000000000000000..73671f2dc1dcca5a6d6079df273db65dd2637df4 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/GatewayApplication.java @@ -0,0 +1,107 @@ +package eu.neclab.ngsildbroker.gateway; + +import org.apache.catalina.connector.Connector; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.web.embedded.tomcat.TomcatConnectorCustomizer; +import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory; +import org.springframework.boot.web.servlet.server.ConfigurableServletWebServerFactory; +import org.springframework.cloud.netflix.eureka.EnableEurekaClient; +import org.springframework.cloud.netflix.zuul.EnableZuulProxy; +import org.springframework.context.annotation.Bean; +import org.springframework.http.HttpMethod; +import org.springframework.web.cors.CorsConfiguration; +import org.springframework.web.cors.UrlBasedCorsConfigurationSource; +import org.springframework.web.filter.CorsFilter; + +@SpringBootApplication +@EnableZuulProxy +@EnableEurekaClient +public class GatewayApplication { + public static void main(String[] args) { + SpringApplication.run(GatewayApplication.class, args); + } + @Value("${gateway.enablecors:false}") + boolean enableCors; + @Value("${gateway.enablecors.allowall:false}") + boolean allowAllCors; + + @Value("${gateway.enablecors.allowedorigin:null}") + String allowedOrigin; + + @Value("${gateway.enablecors.allowedheader:null}") + String allowedHeader; + + @Value("${gateway.enablecors.allowallmethods:false}") + boolean allowAllCorsMethods; + + @Value("${gateway.enablecors.allowedmethods:null}") + String allowedMethods; + + + @Bean + public CorsFilter corsFilter() { + final UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); + if(!enableCors) { + return new CorsFilter(source); + } + final CorsConfiguration config = new CorsConfiguration(); + if(allowAllCors) { + config.setAllowCredentials(true); + config.addAllowedOrigin("*"); + config.addAllowedHeader("*"); + config.addAllowedMethod(HttpMethod.DELETE); + config.addAllowedMethod(HttpMethod.POST); + config.addAllowedMethod(HttpMethod.GET); + config.addAllowedMethod(HttpMethod.OPTIONS); + config.addAllowedMethod(HttpMethod.PATCH); + config.addAllowedMethod(HttpMethod.PUT); + config.addAllowedMethod(HttpMethod.HEAD); + config.addAllowedMethod(HttpMethod.TRACE); + source.registerCorsConfiguration("/**", config); + }else { + if(allowedOrigin != null) { + for(String origin: allowedOrigin.split(",")) { + config.addAllowedOrigin(origin); + } + + } + if(allowedHeader != null) { + for(String header: allowedHeader.split(",")) { + config.addAllowedHeader(header); + } + } + if(allowAllCorsMethods) { + config.addAllowedMethod(HttpMethod.DELETE); + config.addAllowedMethod(HttpMethod.POST); + config.addAllowedMethod(HttpMethod.GET); + 
config.addAllowedMethod(HttpMethod.OPTIONS); + config.addAllowedMethod(HttpMethod.PATCH); + config.addAllowedMethod(HttpMethod.PUT); + config.addAllowedMethod(HttpMethod.HEAD); + config.addAllowedMethod(HttpMethod.TRACE); + }else { + if(allowedMethods != null) { + for(String method: allowedMethods.split(",")) { + config.addAllowedMethod(method); + } + } + } + } + + + return new CorsFilter(source); + } + @Bean + public ConfigurableServletWebServerFactory webServerFactory() { + TomcatServletWebServerFactory factory = new TomcatServletWebServerFactory(); + factory.addConnectorCustomizers(new TomcatConnectorCustomizer() { + @Override + public void customize(Connector connector) { + connector.setProperty("relaxedQueryChars", "|{}[]"); + } + }); + return factory; + } +} diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/config/PreFilter.java b/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/config/PreFilter.java new file mode 100644 index 0000000000000000000000000000000000000000..e0f630e5f6ca362f8ef4f0e9784050ebe9ca68f7 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/config/PreFilter.java @@ -0,0 +1,100 @@ +package eu.neclab.ngsildbroker.gateway.config; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.cloud.client.ServiceInstance; +import org.springframework.cloud.client.loadbalancer.LoadBalancerClient; +import org.springframework.stereotype.Component; + +import com.netflix.zuul.ZuulFilter; +import com.netflix.zuul.context.RequestContext; +import com.netflix.zuul.exception.ZuulException; + +/** + * To route all GET request for entities to Query-Manager service + * + * @author Kailash Adhikari + * @version 1.0 + * @date 10-Jul-2018 + */ +@Component +public class PreFilter extends ZuulFilter { + + @Value("${query-manager.request-path}") + private String REQUEST_PATH; + @Value("${query-manager.target-service}") + private String TARGET_SERVICE; + @Value("${query-manager.http-method}") + private String HTTP_METHOD; + + @Value("${entity-manager.target-service}") + private String EM_TARGET_SERVICE; + @Value("${entity-manager.http-method}") + private String EM_HTTP_METHOD; + + static boolean switchQMEM = true; + + @Autowired + private LoadBalancerClient loadBalancer; + + /** + * route GET requests to query-manager microservice + */ + @Override + public Object run() throws ZuulException { + RequestContext context = RequestContext.getCurrentContext(); + ServiceInstance serviceInstance; + + if (switchQMEM) { + serviceInstance = loadBalancer.choose(this.TARGET_SERVICE); + } else { + serviceInstance = loadBalancer.choose(this.EM_TARGET_SERVICE); + } + try { + if (serviceInstance != null) { + context.setRouteHost(serviceInstance.getUri().toURL()); + } else { + throw new IllegalStateException("Target service instance not found!"); + } + } catch (Exception e) { + throw new IllegalArgumentException("Couldn't get service URL!", e); + } + return null; + } + + /** + * intercept requests and all GET requests will be handled by run() method. 
+ */ + @Override + public boolean shouldFilter() { + RequestContext context = RequestContext.getCurrentContext(); + HttpServletRequest request = context.getRequest(); + String method = request.getMethod(); + String requestURI = request.getRequestURI(); + if (!requestURI.startsWith(REQUEST_PATH)) { + return false; + } + if (HTTP_METHOD.equalsIgnoreCase(method)) { + switchQMEM = true; + return true; + } + /* + * if (EM_HTTP_METHOD.equalsIgnoreCase(method)) { switchQMEM = false; return + * true; } + */ + return false; + } + + @Override + public int filterOrder() { + return 0; + } + + @Override + public String filterType() { + return "route"; + } + +} diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/config/SiteSecurityConfigurer.java b/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/config/SiteSecurityConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..257c6f31dac3c74633adac8090917c2ed1d6ec07 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/java/eu/neclab/ngsildbroker/gateway/config/SiteSecurityConfigurer.java @@ -0,0 +1,43 @@ +package eu.neclab.ngsildbroker.gateway.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.security.oauth2.client.EnableOAuth2Sso; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; + +@Configuration +@EnableOAuth2Sso +@EnableWebSecurity +public class SiteSecurityConfigurer extends WebSecurityConfigurerAdapter { + + @Value("${security.active}") + private String securityEnabled; + + @Override + protected void configure(HttpSecurity http) throws Exception { + if (securityEnabled.equalsIgnoreCase("true")) { + http.antMatcher("/**") + .authorizeRequests() + .antMatchers("/", "/webjars/**") + .permitAll() + .anyRequest() + .authenticated() + .and() + .logout() + .logoutSuccessUrl("/") + .permitAll() + .and() + .csrf() + .disable(); + } else { + http.antMatcher("/**") + .authorizeRequests().antMatchers("/", "/webjars/**") + .permitAll() + .and() + .csrf() + .disable(); + } + } +} diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-aaio.yml b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..809ad1acb5607babff4679661649ac6d1c4bd65a --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-aaio.yml @@ -0,0 +1,131 @@ +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET +entity-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: POST +spring: + application: + name: gateway + + +server: + port: 9090 + tomcat: + max: + threads:2000 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: 
'*' + ribbon: + eager-load: + enabled: true + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + batchoperations: + sensitiveHeaders: + path: /ngsi-ld/v1/entityOperations/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/entities/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + infos: + sensitiveHeaders: + path: /scorpio/v1/info/** + serviceId: aio-runner + stripPrefix: false + health: + sensitiveHeaders: + path: /health/** + serviceId: aio-runner + stripPrefix: false + corecontext: + sensitiveHeaders: + path: /corecontext/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-aio.yml b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..376782300e505593cc48e9c84e65afec3d9c59f2 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-aio.yml @@ -0,0 +1,137 @@ +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET +entity-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: POST + +spring: + application: + name: gateway + + +server: + port: 9090 + tomcat: + max: + threads:2000 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: '*' + ribbon: + eager-load: + enabled: true + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 
60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + batchoperations: + sensitiveHeaders: + path: /ngsi-ld/v1/entityOperations/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/entities/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + infos: + sensitiveHeaders: + path: /scorpio/v1/info/** + serviceId: aio-runner + stripPrefix: false + health: + sensitiveHeaders: + path: /health/** + serviceId: aio-runner + stripPrefix: false + version: + sensitiveHeaders: + path: /version/** + serviceId: aio-runner + stripPrefix: false + corecontext: + sensitiveHeaders: + path: /corecontext/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-dist.yml b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..20f430a3897982100dc740a192b9762a7117e1db --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application-dist.yml @@ -0,0 +1,10 @@ +server: + port: 9090 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application.yml b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..54e5357fe60414b26da89df2a3eadd2ae60c2109 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/main/resources/application.yml @@ -0,0 +1,135 @@ + +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 +server: + port: 9090 + tomcat: + max: + threads:2000 + + + +query-manager: + request-path: 
/ngsi-ld/v1/entities + target-service: QUERY-MANAGER + http-method: GET +entity-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: POST +spring: + application: + name: gateway +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +# Configure the Authorization Server and User Info Resource Server details +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user +#Zuul routing rules for microservices. +zuul: + ignored-services: '*' + ribbon: + eager-load: + enabled: true + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: ENTITY-MANAGER + stripPrefix: false + batchoperations: + sensitiveHeaders: + path: /ngsi-ld/v1/entityOperations/** + serviceId: ENTITY-MANAGER + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: SUBSCRIPTION-MANAGER + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: C-SOURCES + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: C-SOURCES + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/entities/** + serviceId: HISTORY-MANAGER + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: ATCONTEXT-SERVER + stripPrefix: false + infos: + sensitiveHeaders: + path: /scorpio/v1/info/** + serviceId: STORAGE-MANAGER + stripPrefix: false + health: + sensitiveHeaders: + path: /health/** + serviceId: INFO-SERVER + stripPrefix: false + version: + sensitiveHeaders: + path: /version/** + serviceId: INFO-SERVER + stripPrefix: false + corecontext: + sensitiveHeaders: + path: /corecontext/** + serviceId: INFO-SERVER + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager +logging: + level: + root: ERROR \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/gateway/src/test/java/poc/nec/gateway/GatewayApplicationTests.java b/scorpio-broker/SpringCloudModules/gateway/src/test/java/poc/nec/gateway/GatewayApplicationTests.java new file mode 100644 index 0000000000000000000000000000000000000000..5b23e1ff18b2124f96845b8bc3a8b79d441a0668 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/gateway/src/test/java/poc/nec/gateway/GatewayApplicationTests.java @@ -0,0 +1,16 @@ +package poc.nec.gateway; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.junit4.SpringRunner; + 
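+// Smoke test for the gateway module: it only verifies that the Spring application context can
+// be built. The spring.main.allow-bean-definition-overriding=true property set on the test
+// below presumably exists so that duplicate bean definitions contributed by the pulled-in
+// starters do not abort context creation (an inference, not documented in the original sources).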
+@RunWith(SpringRunner.class) +@SpringBootTest(properties= {"spring.main.allow-bean-definition-overriding=true"}) +public class GatewayApplicationTests { + + @Test + public void contextLoads() { + } + +} diff --git a/scorpio-broker/SpringCloudModules/pom.xml b/scorpio-broker/SpringCloudModules/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..7c5892f0073b5a5a1cd8768f75ae6c2eef0d94e9 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/pom.xml @@ -0,0 +1,23 @@ + + + 4.0.0 + springcloud + pom + + + eu.neclab.ngsildbroker + SpringCloudParent + 1.0.0-SNAPSHOT + ../SpringCloudParent + + 1.0.0-SNAPSHOT + + + gateway + eureka + config-server + + + + diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.gitignore b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..b83d22266ac8aa2f8df2edef68082c789727841d --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.core.resources.prefs b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.core.resources.prefs new file mode 100644 index 0000000000000000000000000000000000000000..839d647eef851c560a9854ff81d9caa1df594ced --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.core.resources.prefs @@ -0,0 +1,5 @@ +eclipse.preferences.version=1 +encoding//src/main/java=UTF-8 +encoding//src/main/resources=UTF-8 +encoding//src/test/java=UTF-8 +encoding/=UTF-8 diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.jdt.core.prefs b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000000000000000000000000000000000..2f5cc74c3a8577df9faafd82992b0c62e56352be --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,8 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 +org.eclipse.jdt.core.compiler.compliance=1.8 +org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled +org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning +org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=ignore +org.eclipse.jdt.core.compiler.release=disabled +org.eclipse.jdt.core.compiler.source=1.8 diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.m2e.core.prefs b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.m2e.core.prefs new file mode 100644 index 0000000000000000000000000000000000000000..f897a7f1cb2389f85fe6381425d29f0a9866fb65 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/.settings/org.eclipse.m2e.core.prefs @@ -0,0 +1,4 @@ +activeProfiles= +eclipse.preferences.version=1 +resolveWorkspaceProjects=true +version=1 diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/pom.xml b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..8ccf173f28b445921d6f09e32d72897a693908ca --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/pom.xml @@ -0,0 +1,125 @@ + + + 4.0.0 + + spring-cloud-gateway + jar + + spring-cloud-gateway + + + + eu.neclab.ngsildbroker + SpringCloudParent + 1.0.0-SNAPSHOT + ../../SpringCloudParent 
+ + 1.0.0-SNAPSHOT + + + + + org.springframework.cloud + spring-cloud-starter-gateway + + + + + org.springframework.cloud + spring-cloud-starter-netflix-eureka-client + + + + + org.springframework.boot + spring-boot-starter-actuator + + + org.springframework.cloud + spring-cloud-starter-oauth2 + + + org.springframework.cloud + spring-cloud-starter-security + + + org.springframework.boot + spring-boot-starter-reactor-netty + + + + + + + org.springframework.cloud + spring-cloud-dependencies + ${spring-cloud.version} + pom + import + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + + + + spring-snapshots + Spring Snapshots + https://repo.spring.io/snapshot + + true + + + + spring-milestones + Spring Milestones + https://repo.spring.io/milestone + + false + + + + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/java/eu/neclab/ngsildbroker/gateway/SpringGatewayGatewayApplication.java b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/java/eu/neclab/ngsildbroker/gateway/SpringGatewayGatewayApplication.java new file mode 100644 index 0000000000000000000000000000000000000000..a2915be7fd49cebf2491e95a56e1bdf75c88a2f9 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/java/eu/neclab/ngsildbroker/gateway/SpringGatewayGatewayApplication.java @@ -0,0 +1,95 @@ +package eu.neclab.ngsildbroker.gateway; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.cloud.client.discovery.EnableDiscoveryClient; + +@SpringBootApplication +@EnableDiscoveryClient +public class SpringGatewayGatewayApplication { + public static void main(String[] args) { + SpringApplication.run(SpringGatewayGatewayApplication.class, args); + } +// @Value("${gateway.enablecors:false}") +// boolean enableCors; +// @Value("${gateway.enablecors.allowall:false}") +// boolean allowAllCors; +// +// @Value("${gateway.enablecors.allowedorigin:null}") +// String allowedOrigin; +// +// @Value("${gateway.enablecors.allowedheader:null}") +// String allowedHeader; +// +// @Value("${gateway.enablecors.allowallmethods:false}") +// boolean allowAllCorsMethods; +// +// @Value("${gateway.enablecors.allowedmethods:null}") +// String allowedMethods; +// +// +// @Bean +// public CorsFilter corsFilter() { +// final UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource(); +// if(!enableCors) { +// return new CorsFilter(source); +// } +// final CorsConfiguration config = new CorsConfiguration(); +// if(allowAllCors) { +// config.setAllowCredentials(true); +// config.addAllowedOrigin("*"); +// config.addAllowedHeader("*"); +// config.addAllowedMethod(HttpMethod.DELETE); +// config.addAllowedMethod(HttpMethod.POST); +// config.addAllowedMethod(HttpMethod.GET); +// config.addAllowedMethod(HttpMethod.OPTIONS); +// config.addAllowedMethod(HttpMethod.PATCH); +// config.addAllowedMethod(HttpMethod.PUT); +// config.addAllowedMethod(HttpMethod.HEAD); +// config.addAllowedMethod(HttpMethod.TRACE); +// source.registerCorsConfiguration("/**", config); +// }else { +// if(allowedOrigin != null) { +// for(String origin: allowedOrigin.split(",")) { +// config.addAllowedOrigin(origin); +// } 
+// +// } +// if(allowedHeader != null) { +// for(String header: allowedHeader.split(",")) { +// config.addAllowedHeader(header); +// } +// } +// if(allowAllCorsMethods) { +// config.addAllowedMethod(HttpMethod.DELETE); +// config.addAllowedMethod(HttpMethod.POST); +// config.addAllowedMethod(HttpMethod.GET); +// config.addAllowedMethod(HttpMethod.OPTIONS); +// config.addAllowedMethod(HttpMethod.PATCH); +// config.addAllowedMethod(HttpMethod.PUT); +// config.addAllowedMethod(HttpMethod.HEAD); +// config.addAllowedMethod(HttpMethod.TRACE); +// }else { +// if(allowedMethods != null) { +// for(String method: allowedMethods.split(",")) { +// config.addAllowedMethod(method); +// } +// } +// } +// } +// +// +// return new CorsFilter(source); +// } +// @Bean +// public ConfigurableServletWebServerFactory webServerFactory() { +// TomcatServletWebServerFactory factory = new TomcatServletWebServerFactory(); +// factory.addConnectorCustomizers(new TomcatConnectorCustomizer() { +// @Override +// public void customize(Connector connector) { +// connector.setProperty("relaxedQueryChars", "|{}[]"); +// } +// }); +// return factory; +// } +} diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-aaio.yml b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..4b01b82dccb0ad361715935beaa9d1b78e13d489 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-aaio.yml @@ -0,0 +1,112 @@ +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET + +spring: + application: + name: gateway + + +server: + port: 9090 + tomcat: + max: + threads:2000 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: '*' + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + batchoperations: + sensitiveHeaders: + path: /ngsi-ld/v1/entityOperations/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + infos: + sensitiveHeaders: + path: /scorpio/v1/info/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + 
#userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-aio.yml b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..762e2efec1a84766d05c4dc7f17635a58036d5bd --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-aio.yml @@ -0,0 +1,112 @@ +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 + + + +query-manager: + request-path: /ngsi-ld/v1/entities + target-service: aio-runner + http-method: GET + +spring: + application: + name: gateway + + +server: + port: 9090 + tomcat: + max: + threads:2000 + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +zuul: + ignored-services: '*' + semaphore: + maxSemaphores: 60000 + host: + connect-timeout-millis: 60000 + socket-timeout-millis: 60000 + routes: + entity-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/entities/** + serviceId: aio-runner + stripPrefix: false + subscription-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/subscriptions/** + serviceId: aio-runner + stripPrefix: false + context-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceRegistrations/** + serviceId: aio-runner + stripPrefix: false + subforeg: + sensitiveHeaders: + path: /ngsi-ld/v1/csourceSubscriptions/** + serviceId: aio-runner + stripPrefix: false + batchoperations: + sensitiveHeaders: + path: /ngsi-ld/v1/entityOperations/** + serviceId: aio-runner + stripPrefix: false + history-manager: + sensitiveHeaders: + path: /ngsi-ld/v1/temporal/** + serviceId: aio-runner + stripPrefix: false + atcontext-server: + sensitiveHeaders: + path: /ngsi-ld/contextes/** + serviceId: aio-runner + stripPrefix: false + infos: + sensitiveHeaders: + path: /scorpio/v1/info/** + serviceId: aio-runner + stripPrefix: false + test-manager: + sensitiveHeaders: + path: /test/** + serviceId: testmanager + stripPrefix: false +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: 
http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-dist.yml b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..d4c37dbc09d7ea9c6f90a4fc114a3c1b7d1c9d86 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application-dist.yml @@ -0,0 +1,7 @@ +server: + port: 9090 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application.yml b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..69a856e97061f3066b602b36a3aa3bce12c73f64 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/application.yml @@ -0,0 +1,61 @@ + +# Increase the Hystrix timeout to 60s (globally) +hystrix: + command: + default: + execution: + isolation: + thread: + timeoutInMilliseconds: 60000 + + +#GET request configuration for QUERY-MANAGER +ribbon: + ReadTimeout: 60000 + ConnectTimeout: 60000 +server: + port: 9090 + tomcat: + max: + threads:2000 + + + +eureka: + client: + register-with-eureka: true + fetchRegistry: true + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +# Configure the Authorization Server and User Info Resource Server details +security: + active: false + oauth2: + client: + # for keycloak configure + #accessTokenUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/token + #userAuthorizationUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/auth + #clientId: authserver + #clientSecret: ae8c99a9-f98d-41e9-8fb2-d348acb987e0 + # for ketrock configure + accessTokenUri: http://172.30.64.120:3000/oauth2/token + userAuthorizationUri: http://172.30.64.120:3000/oauth2/authorize + clientId: a2034c11-d2a9-4cab-9fac-ff65425bd53f + clientSecret: 7364baec-6d6f-4307-8c71-d66e1e6c3afc + resource: + # for keycloak configure + #userInfoUri: http://10.0.4.33:8080/auth/realms/mykeycloak/protocol/openid-connect/userinfo + # for keycloak configure + userInfoUri: http://172.30.64.120:3000/user + +spring: + application: + name: sc-gateway + cloud: + gateway: + routes: + - id: websocket_route + uri: lb:ws://SUBSCRIPTION-MANAGER + predicates: + - Path=/websocket/** \ No newline at end of file diff --git a/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/eu/neclab/ngsildbroker/gateway/config/SiteSecurityConfigurer.java b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/eu/neclab/ngsildbroker/gateway/config/SiteSecurityConfigurer.java new file mode 100644 index 0000000000000000000000000000000000000000..257c6f31dac3c74633adac8090917c2ed1d6ec07 --- /dev/null +++ b/scorpio-broker/SpringCloudModules/spring-cloud-gateway/src/main/resources/eu/neclab/ngsildbroker/gateway/config/SiteSecurityConfigurer.java @@ -0,0 +1,43 @@ +package eu.neclab.ngsildbroker.gateway.config; + +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.security.oauth2.client.EnableOAuth2Sso; +import org.springframework.context.annotation.Configuration; 
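+// Descriptive note: this configurer toggles gateway security via the security.active property.
+// When it is "true", every request except "/" and "/webjars/**" requires an authenticated
+// OAuth2 SSO session and logout redirects to "/"; for any other value no authentication is
+// enforced. CSRF protection is disabled in both branches.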
+import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; + +@Configuration +@EnableOAuth2Sso +@EnableWebSecurity +public class SiteSecurityConfigurer extends WebSecurityConfigurerAdapter { + + @Value("${security.active}") + private String securityEnabled; + + @Override + protected void configure(HttpSecurity http) throws Exception { + if (securityEnabled.equalsIgnoreCase("true")) { + http.antMatcher("/**") + .authorizeRequests() + .antMatchers("/", "/webjars/**") + .permitAll() + .anyRequest() + .authenticated() + .and() + .logout() + .logoutSuccessUrl("/") + .permitAll() + .and() + .csrf() + .disable(); + } else { + http.antMatcher("/**") + .authorizeRequests().antMatchers("/", "/webjars/**") + .permitAll() + .and() + .csrf() + .disable(); + } + } +} diff --git a/scorpio-broker/SpringCloudParent/.gitignore b/scorpio-broker/SpringCloudParent/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/SpringCloudParent/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/SpringCloudParent/pom.xml b/scorpio-broker/SpringCloudParent/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..941d80e514af8bbe8c7fd8b871f3d863bbd9c2ff --- /dev/null +++ b/scorpio-broker/SpringCloudParent/pom.xml @@ -0,0 +1,48 @@ + + 4.0.0 + + SpringCloudParent + pom + + eu.neclab.ngsildbroker + OverallParent + 1.0.0-SNAPSHOT + ../OverallParent + + 1.0.0-SNAPSHOT + + + + + + + + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + + + + repackage + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Storage/.gitignore b/scorpio-broker/Storage/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a1c3ab4d08c0f9f91918f21c730272a4711885e8 --- /dev/null +++ b/scorpio-broker/Storage/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project diff --git a/scorpio-broker/Storage/StorageManager/.gitignore b/scorpio-broker/Storage/StorageManager/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..a05b0a82e17575876aec91bb616f270df6c39ca4 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/.gitignore @@ -0,0 +1,4 @@ +/target/ +/.settings/ +.classpath +.project \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/all_datatypes.jsonld b/scorpio-broker/Storage/StorageManager/db-scripts/all_datatypes.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..67c7178d4935e6d5169dcd6ef57e996a09a64740 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/all_datatypes.jsonld @@ -0,0 +1,167 @@ +{ + "@context": [ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + { + "numberExample": "http://example.org/numberExample", + "stringExample": "http://example.org/stringExample", + "dateTimeExample": { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@id": "http://example.org/dateTimeExample" + }, + "observedAtDateTimeExample": "http://example.org/observedAtDateTimeExample", + "dateExample": { + "@type": "https://uri.etsi.org/ngsi-ld/Date", + "@id": "http://example.org/dateExample" + }, + "timeExample": { + 
"@type": "https://uri.etsi.org/ngsi-ld/Time", + "@id": "http://example.org/timeExample" + }, + "otherValueExample": "http://example.org/otherValueExample", + "trueExample": "http://example.org/trueExample", + "falseExample": "http://example.org/falseExample", + "uriExample": "http://example.org/uriExample", + "arrayExample": "http://example.org/arrayExample", + "topLevelExample": "http://example.org/topLevelExample", + "subPropertyExample": "http://example.org/subPropertyExample", + "manyLevelsExample": "http://example.org/manyLevelsExample", + "sub1": { + "@type": "@id", + "@id": "http://example.org/sub1" + }, + "sub2": "http://example.org/sub2", + "sub3": "http://example.org/sub3", + "relationshipExample": { + "@type": "@id", + "@id": "http://example.org/relationshipExample" + }, + "objectExample": "http://example.org/objectExample", + "streetAddress": "https://example.org/streetAddress", + "addressRegion": "https://example.org/addressRegion", + "addressLocality": "https://example.org/addressLocality", + "postalCode": "https://example.org/postalCode", + "multiLevelObjectExample": "http://example.org/multiLevelObjectExample", + "streetName": "https://example.org/streetName", + "houseNumber": "https://example.org/houseNumber", + "area": "https://example.org/area" + } + ], + "id": "urn:ngsi-ld:Test:all_datatypes", + "type": "urn:ngsi-ld:Test", + "numberExample": { + "type": "Property", + "value": 100 + }, + "stringExample": { + "type": "Property", + "value": "Mercedes" + }, + "dateTimeExample": { + "type": "Property", + "value": { + "@type": "DateTime", + "@value": "2018-12-04T12:00:00Z" + } + }, + "observedAtDateTimeExample": { + "type": "Property", + "value": "Foo", + "observedAt": "2018-12-04T12:00:00Z" + }, + "dateExample": { + "type": "Property", + "value": { + "@type": "Date", + "@value": "2018-12-04" + } + }, + "timeExample": { + "type": "Property", + "value": { + "@type": "Time", + "@value": "12:00:00Z" + } + }, + "otherValueExample": { + "type": "Property", + "value": true + }, + "trueExample": { + "type": "Property", + "value": true + }, + "falseExample": { + "type": "Property", + "value": false + }, + "uriExample": { + "type": "Property", + "value": "http://www.example.com" + }, + "objectExample": { + "type": "Property", + "value": { + "streetAddress": "Main Street 65", + "addressRegion": "Metropolis", + "addressLocality": "Duck Village", + "postalCode": 42000 + } + }, + "multiLevelObjectExample": { + "type": "Property", + "value": { + "streetAddress": { + "streetName": "Main Street", + "houseNumber": 65 + } + } + }, + "arrayExample": { + "type": "Property", + "value": [ + 999, + true, + ["a", "b"], + "Foo", + { + "streetAddress": "Franklinstrasse" + } + ] + }, + "topLevelExample": { + "type": "Property", + "value": 10, + "subPropertyExample": { + "type": "Property", + "value": 5 + } + }, + "manyLevelsExample": { + "type": "Property", + "value": "A", + "sub1": { + "type": "Relationship", + "object": "urn:ngsi-ld:B", + "sub2": { + "type": "Property", + "value": "C", + "sub3": { + "type": "Property", + "value": "D" + } + } + } + }, + "relationshipExample": { + "type": "Relationship", + "object": "urn:ngsi-ld:relationshipExample" + }, + "location":{ + "type":"GeoProperty", + "value":"{ \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }" + }, + "area":{ + "type":"GeoProperty", + "value":"{ \"type\":\"Polygon\", \"coordinates\": [ [ [ 7.6025390625, 47.70976154266637 ], [ 12.94189453125, 47.90161354142077 ], [ 9.33837890625, 54.85131525968606 ], [ 7.6025390625, 47.70976154266637 ] ] 
] }" + } +} \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/dml-examples.sql b/scorpio-broker/Storage/StorageManager/db-scripts/dml-examples.sql new file mode 100644 index 0000000000000000000000000000000000000000..e8f0d8219ca30d58166e64f71cdfa7a73ccf21ab --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/dml-examples.sql @@ -0,0 +1,811 @@ +begin; + +truncate entity; + +-- source: vehicle.jsonld +insert into Entity (id, data) values ('urn:ngsi-ld:Vehicle:A4567', +' +{ + "@id": "urn:ngsi-ld:Vehicle:A4567", + "http://example.org/vehicle/brandName": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04" + } + ], + "http://example.org/common/isParked": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:OffStreetParking:Downtown1" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04" + } + ], + "http://example.org/common/providedBy": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:Person:Bob" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }" + } + ] + } + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 80 + } + ] + } + ], + "@type": [ + "http://example.org/vehicle/Vehicle" + ] +} +' +); + +-- testing json types... 
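+-- Sanity check (added sketch, not part of the original load script): the expanded JSON-LD
+-- stored above can be read back with the jsonb path operator, e.g. the vehicle brand name:
+select id,
+       data#>>'{http://example.org/vehicle/brandName,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as brand
+  from entity
+ where id = 'urn:ngsi-ld:Vehicle:A4567';
+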
+-- source: all_datatypes.jsonld +insert into Entity (id, data) values ('urn:ngsi-ld:Test:all_datatypes', +' + { + "http://example.org/arrayExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 999 + }, + { + "@value": true + }, + { + "@value": "a" + }, + { + "@value": "b" + }, + { + "@value": "Foo" + }, + { + "https://example.org/streetAddress": [ + { + "@value": "Franklinstrasse" + } + ] + } + ] + } + ], + "http://example.org/dateExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/Date", + "@value": "2018-12-04" + } + ] + } + ], + "http://example.org/dateTimeExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-12-04T12:00:00Z" + } + ] + } + ], + "http://example.org/falseExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": false + } + ] + } + ], + "@id": "urn:ngsi-ld:Test:all_datatypes", + "http://example.org/manyLevelsExample": [ + { + "http://example.org/sub1": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:B" + } + ], + "http://example.org/sub2": [ + { + "http://example.org/sub3": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "D" + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "C" + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "A" + } + ] + } + ], + "http://example.org/multiLevelObjectExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "https://example.org/streetAddress": [ + { + "https://example.org/houseNumber": [ + { + "@value": 65 + } + ], + "https://example.org/streetName": [ + { + "@value": "Main Street" + } + ] + } + ] + } + ] + } + ], + "http://example.org/numberExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 100 + } + ] + } + ], + "http://example.org/objectExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "https://example.org/addressLocality": [ + { + "@value": "Duck Village" + } + ], + "https://example.org/addressRegion": [ + { + "@value": "Metropolis" + } + ], + "https://example.org/postalCode": [ + { + "@value": 42000 + } + ], + "https://example.org/streetAddress": [ + { + "@value": "Main Street 65" + } + ] + } + ] + } + ], + "http://example.org/observedAtDateTimeExample": [ + { + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2018-12-04T12:00:00Z" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Foo" + } + ] + } + ], + "http://example.org/otherValueExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": true + } + ] + } + ], + 
"http://example.org/relationshipExample": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:relationshipExample" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "http://example.org/stringExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + "http://example.org/timeExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/Time", + "@value": "12:00:00Z" + } + ] + } + ], + "http://example.org/topLevelExample": [ + { + "http://example.org/subPropertyExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 5 + } + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 10 + } + ] + } + ], + "http://example.org/trueExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": true + } + ] + } + ], + "@type": [ + "urn:ngsi-ld:Test" + ], + "http://example.org/uriExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "http://www.example.com" + } + ] + } + ] + } + +' +); + +-- mixing properties, relationships and types +-- here attr1 is a property and attr2 is a relationship +insert into Entity (id, data) values ('urn:ngsi-ld:Test:entity2', +'{ + "@id": "urn:ngsi-ld:Test:entity2", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "http://example.org/attr1": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "urn:ngsi-ld:test" + } + ] + } + ], + "http://example.org/attr2": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:test" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ] + }' +); + +-- here attr2 is a property and attr1 is a relationship +insert into Entity (id, data) values ('urn:ngsi-ld:Test:entity3', +'{ + "@id": "urn:ngsi-ld:Test:entity3", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "http://example.org/attr1": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:test" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "http://example.org/attr2": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "urn:ngsi-ld:test" + } + ] + } + ] + } +' +); + +-- here attr1 has an invalid ngsi-ld type (should not exist in the database) +insert into Entity (id, data) values ('urn:ngsi-ld:Test:entity4', +'{ + "@id": "urn:ngsi-ld:Test:entity4", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "http://example.org/attr1": [ + { + "@type": [ + "https://json-ld.org/playground/Invalid" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "urn:ngsi-ld:test" + } + ] + } + ] + }' +); + +-- here attr1 is a property and stores a number +insert into Entity (id, data) values ('urn:ngsi-ld:Test:entity5', +'{ + "@id": "urn:ngsi-ld:Test:entity5", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "http://example.org/attr1": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + 
"https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 10 + } + ] + } + ] + }' +); + +-- geographical +-- "@context": "http://forge.etsi.org/gitlab/NGSI-LD/NGSI-LD/raw/master/coreContext/ngsi-ld-core-context.jsonld", + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:NecLabsHeidelberg', +'{ + "@id": "urn:ngsi-ld:Test:NecLabsHeidelberg", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.684783577919006, 49.406131991436396 ] }" + } + ] + } + ], + "@type": [ + "https://json-ld.org/playground/Test" + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:AldiBergheim', +'{ + "@id": "urn:ngsi-ld:Test:AldiBergheim", + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": " { \"type\": \"Point\", \"coordinates\": [ 8.689231, 49.407524 ] }" + } + ] + + } + ], + "http://example.org/parkingLotLocation": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.688387, 49.407354 ] }" + } + ] + } + ], + "@type": [ + "https://json-ld.org/playground/Test" + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:ReweDasCarre', +'{ + "@id": "urn:ngsi-ld:Test:ReweDasCarre", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.691790, 49.407366 ] }" + } + ] + } + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:KarlsruherInstitutFurTechnologie', +'{ + "@id": "urn:ngsi-ld:Test:KarlsruherInstitutFurTechnologie", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\": \"Point\", \"coordinates\": [ 8.416891, 49.011942 ] }" + } + ] + } + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:HeidelbergCity', +'{ + "@id": "urn:ngsi-ld:Test:HeidelbergCity", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"Polygon\", \"coordinates\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }" + } + ] + } + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:PathToNecParkingLot', +'{ + "@id": "urn:ngsi-ld:Test:PathToNecParkingLot", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"LineString\", \"coordinates\": 
[[8.684746026992798,49.406154680259746],[8.68467628955841,49.40620703904283],[8.684748709201813,49.40628732240177],[8.684737980365753,49.40635364333839]] }" + } + ] + } + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:NecLabsHeidelbergBuilding', +'{ + "@id": "urn:ngsi-ld:Test:NecLabsHeidelbergBuilding", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"Polygon\", \"coordinates\": [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] }" + } + ] + } + + ] + }' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:DroneAreaCoverage', +'{ + "@id": "urn:ngsi-ld:Test:DroneAreaCoverage", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\": \"Polygon\", \"coordinates\": [[[8.685226142406464,49.406259397770064],[8.685590922832489,49.406259397770064],[8.685590922832489,49.40641472833234],[8.685226142406464,49.40641472833234],[8.685226142406464,49.406259397770064]]] }" + } + ] + } + ] + }' +); + + + +insert into Entity (id, data) values ('urn:ngsi-ld:Test:GeometryTypes', +'{ + "@id": "urn:ngsi-ld:Test:GeometryTypes", + "@type": [ + "https://json-ld.org/playground/Test" + ], + "http://example.org/lineStringExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"LineString\", \"coordinates\": [ [100.0, 0.0], [101.0, 1.0] ] }" + } + ] + } + ], + "http://example.org/multiLineStringExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"MultiLineString\", \"coordinates\": [ [ [100.0, 0.0], [101.0, 1.0] ], [ [102.0, 2.0], [103.0, 3.0] ] ] }" + } + ] + } + ], + "http://example.org/multiPointsExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"MultiPoint\", \"coordinates\": [ [100.0, 0.0], [101.0, 1.0] ] }" + } + ] + } + ], + "http://example.org/multiPolygonExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"MultiPolygon\", \"coordinates\": [ [ [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0] ] ], [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], [ [100.2, 0.2], [100.2, 0.8], [100.8, 0.8], [100.8, 0.2], [100.2, 0.2] ] ] ] }" + } + ] + } + ], + "http://example.org/pointExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"Point\", \"coordinates\": [100.0, 0.0] }" + } + ] + } + ], + "http://example.org/polygonExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"Polygon\", \"coordinates\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] ] }" + } + ] + } + ], + 
"http://example.org/polygonWithHolesExample": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\": \"Polygon\", \"coordinates\": [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], [ [100.8, 0.8], [100.8, 0.2], [100.2, 0.2], [100.2, 0.8], [100.8, 0.8] ] ] }" + } + ] + } + ] + +}' +); + + + +commit; + diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/geojson-io.sql b/scorpio-broker/Storage/StorageManager/db-scripts/geojson-io.sql new file mode 100644 index 0000000000000000000000000000000000000000..352a06fa7f0faad75111721b61f5fcc231e7e71d --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/geojson-io.sql @@ -0,0 +1,16 @@ +\pset pager 0 +\a +\t +\pset recordsep ' , ' + +\echo This SQL script generates a FeatureCollection to graphically visualize all entity geometries (based on "location" attribute) +\echo Just copy the json below and paste at http://geojson.io +\echo ------------------------ +\echo '{ "type": "FeatureCollection", "features": [' +select '{"type": "Feature", + "properties": { }, + "geometry": ' || coalesce((data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::text, 'null') || '}' as g + from entity + where data @> '{"https://uri.etsi.org/ngsi-ld/location": [{"@type":["https://uri.etsi.org/ngsi-ld/GeoProperty"]}] }'; + +\echo '] }' diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/how-to.txt b/scorpio-broker/Storage/StorageManager/db-scripts/how-to.txt new file mode 100644 index 0000000000000000000000000000000000000000..32cc9d90ad23fe0d6eb554442ab1f08162bd8f52 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/how-to.txt @@ -0,0 +1,15 @@ +NGB Database how-to for Ubuntu +Tested in Ubuntu 18.04 + +1. Install PostgreSQL 10 +$ sudo apt install postgresql-10 postgresql-client-10 postgresql-server-dev-10 + +2. Install PostGIS +$ sudo apt install postgresql-10-postgis-2.4 postgresql-10-postgis-scripts + +3. Create ngb user +$ sudo su - postgres +$ psql -c "create user ngb superuser createdb password 'ngb'"; + +4. Create ngb database +$ psql -c "create database ngb owner = ngb"; diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/ngsild-geoquery-language-mapping.sql b/scorpio-broker/Storage/StorageManager/db-scripts/ngsild-geoquery-language-mapping.sql new file mode 100644 index 0000000000000000000000000000000000000000..41c4e77b9a0842c856df88ecb0baddedb37a0b00 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/ngsild-geoquery-language-mapping.sql @@ -0,0 +1,281 @@ +\pset pager 0 +-- \set ECHO queries + +/* +andOp = %x3B ; ; +equal = %x3D %x3D ; == +georel = nearRel / withinRel / containsRel / overlapsRel / intersectsRel / equalsRel / disjointRel +nearRel = nearOp andOp distance equal PositiveNumber ; near;max(min)Distance==x (in meters) +distance = "maxDistance" / "minDistance" +nearOp = "near" +withinRel = "within" +containsRel = "contains" +intersectsRel = "intersects" +equalsRel = "equals" +disjointRel = "disjoint" +overlapsRel = "overlaps" + +PositiveNumber shall be a non-zero positive number as mandated by the JSON Specification. Thus, it shall follow the ABNF Grammar, production rule named Number, section 6 of [6], excluding the minus' symbol and excluding the number 0. 
+ +--- +Reference geometries shall be specified by: +- A geometry type (parameter name geometry) as defined by the GeoJSON specification ([8], section 1.4), except GeometryCollection. + +[8] 1.4: + the term "geometry type" refers to seven + case-sensitive strings: "Point", "MultiPoint", "LineString", + "MultiLineString", "Polygon", "MultiPolygon", and + "GeometryCollection". + + +- A coordinates (parameter name coordinates) element which shall represent the coordinates of the reference geometry as mandated by [8], section 3.1.1. + +[8] 3.1.1 +A position is the fundamental geometry construct. The "coordinates" member of a Geometry object is composed of either: + - one position in the case of a Point geometry, + - an array of positions in the case of a LineString or MultiPoint + geometry, + - an array of LineString or linear ring (see Section 3.1.6) + coordinates in the case of a Polygon or MultiLineString geometry, + or + - an array of Polygon coordinates in the case of a MultiPolygon + geometry. +A position is an array of numbers. There MUST be two or more + elements. The first two elements are longitude and latitude, or + easting and northing, precisely in that order and using decimal + numbers. + +- near statement + ... with distance (in meters) ... + +*/ + +-- testing all GeoJSON geometry types in PostGis +\x +select id, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/pointExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as point, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/lineStringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as lineString, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/polygonExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as polygonExample, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/polygonWithHolesExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as polygonWithHolesExample, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/multiPointsExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as multiPointsExample, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/multiLineStringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as multiLineStringExample, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{http://example.org/multiPolygonExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') ) as multiPolygonExample +from entity +where id = 'urn:ngsi-ld:Test:GeometryTypes'; +\x + + +\echo Distance between NEC Labs Heidelberg and AldiBergheim +with aux as (select location from entity where id = 'urn:ngsi-ld:Test:NecLabsHeidelberg' limit 1) +SELECT st_distance(aux.location::geography, entity.location::geography) + FROM entity, aux + where id = 'urn:ngsi-ld:Test:AldiBergheim' ; + + +\echo case 1 +\echo georel = nearRel +\echo distance = "maxDistance" +\echo get every entity near NEC Labs Heidelberg, with distance up to 360 meters +\echo NEC Labs Heidelberg coordinates: 8.684783577919006, 49.406131991436396 +\echo geoproperty=location&georel=near;maxDistance==360&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ) ) as geovalue_text + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ 
"https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_DWithin( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::geography, + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [ + 8.684783577919006, 49.406131991436396 + ] + }')::geography, + 360 + ); + + +\echo as "location" is the field, a fastest version can be written as +select id, + location as attr, + ST_AsText( location ) as geovalue_text + from entity + where + ST_DWithin( + location::geography, + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [ + 8.684783577919006, 49.406131991436396 + ] + }')::geography, + 360 + ); + + + +\echo case 1.1 +\echo Polygon geometry +\echo get every entity near NEC building (polygon), with distance up to 360 meters +\echo NEC Labs Heidelberg building polygon coordinates: +\echo [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] +\echo geoproperty=location&georel=near;maxDistance==50&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ) ) as geovalue_text + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_DWithin( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' )::geography, + ST_GeomFromGeoJSON( '{ + "type": "Polygon", + "coordinates": [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] + }')::geography, + 360 + ); + + +\echo case 2 +\echo georel = nearRel +\echo get every entity near NEC Labs Heidelberg, with distance over 400 meters +\echo distance = "minDistance" +\echo geoproperty=location&georel=near;minDistance==400&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + NOT ST_DWithin( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' )::geography, + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [ + 8.684783577919006, 49.406131991436396 + ] + }')::geography, + 400 + ); + + +\echo case 3 +\echo georel = withinRel +\echo get every entity within NEC Labs building (polygon) in Heidelberg +\echo geoproperty=location&georel=within&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D +select id, + 
data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Within( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "Polygon", + "coordinates": [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] + }') + ); + +\echo case 3.1 +\echo georel = withinRel +\echo get every entity within Heidelberg or Karlsruhe ( MultiPolygon ) +\echo geoproperty=location&georel=within&geometry=MultiPolygon&coordinates=%5B%5B%5B%5B8.686752319335938%2C49.359122687528746%5D%2C%5B8.742027282714844%2C49.3642654834877%5D%2C%5B8.767433166503904%2C49.398462568451485%5D%2C%5B8.768119812011719%2C49.42750021620163%5D%2C%5B8.74305725097656%2C49.44781634951542%5D%2C%5B8.669242858886719%2C49.43754770762113%5D%2C%5B8.63525390625%2C49.41968407776289%5D%2C%5B8.637657165527344%2C49.3995797187007%5D%2C%5B8.663749694824219%2C49.36851347448498%5D%2C%5B8.686752319335938%2C49.359122687528746%5D%5D%5D%2C%5B%5B%5B8.364715576171875%2C48.96939999849952%5D%2C%5B8.47320556640625%2C48.982019588328214%5D%2C%5B8.485565185546875%2C49.017157315497165%5D%2C%5B8.411407470703125%2C49.05677012268616%5D%2C%5B8.33587646484375%2C49.031565622700356%5D%2C%5B8.320770263671875%2C48.98562459864604%5D%2C%5B8.364715576171875%2C48.96939999849952%5D%5D%5D%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Within( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "MultiPolygon", + "coordinates": [[[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]],[[[8.364715576171875,48.96939999849952],[8.47320556640625,48.982019588328214],[8.485565185546875,49.017157315497165],[8.411407470703125,49.05677012268616],[8.33587646484375,49.031565622700356],[8.320770263671875,48.98562459864604],[8.364715576171875,48.96939999849952]]]] + }') + ); + +\echo case 4 +\echo georel = containsRel +\echo get every entity that contains NEC Labs Heidelberg (point) +\echo geoproperty=location&georel=contains&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Contains( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [8.684783577919006,49.406131991436396] + }') + ); + +\echo case 5 +\echo georel = 
overlapsRel +\echo get every entity that overlaps with NEC Labs building (polygon) in Heidelberg +\echo geoproperty=location&georel=overlaps&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Overlaps( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "Polygon", + "coordinates": [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] + }') + ); + +\echo case 6 +\echo georel = intersectsRel +\echo get every entity that intersects with NEC Labs building (polygon) in Heidelberg +-- ST_Intersects(g1, g2 ) --> Not (ST_Disjoint(g1, g2 )) +\echo geoproperty=location&georel=overlaps&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Intersects( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "Polygon", + "coordinates": [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] + }') + ); + +\echo case 7 +\echo georel = equalsRel +\echo get every entity equal to NEC Labs Heidelberg point +\echo geoproperty=location&georel=equals&geometry=Point&coordinates=%5B8.684783577919006%2C49.406131991436396%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Equals( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "Point", + "coordinates": [8.684783577919006,49.406131991436396] + }') + ); + +\echo case 8 +\echo georel = disjointRel +\echo get entities that do not share any space together with NEC Labs Heidelberg building +\echo geoproperty=location&georel=disjoint&geometry=Polygon&coordinates=%5B%5B%5B8.684628009796143%2C49.406062179606515%5D%2C%5B8.685507774353027%2C49.4062262372493%5D%2C%5B8.68545413017273%2C49.40634491690448%5D%2C%5B8.684579730033875%2C49.40617736907259%5D%2C%5B8.684628009796143%2C49.406062179606515%5D%5D%5D +select id, + data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr + from 
entity + where + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_Disjoint( + ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'), + ST_GeomFromGeoJSON( '{ + "type": "Polygon", + "coordinates": [[[8.684628009796143,49.406062179606515],[8.685507774353027,49.4062262372493],[8.68545413017273,49.40634491690448],[8.684579730033875,49.40617736907259],[8.684628009796143,49.406062179606515]]] + }') + ); + diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/ngsild-query-language-mapping.sql b/scorpio-broker/Storage/StorageManager/db-scripts/ngsild-query-language-mapping.sql new file mode 100644 index 0000000000000000000000000000000000000000..4b2bc736f5c7eaf1429368beae5023dacf4760c9 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/ngsild-query-language-mapping.sql @@ -0,0 +1,489 @@ +-- TODO: dateTime / date / time + +\pset pager 0 +\set ECHO queries + +/* +Query = (QueryTerm / QueryTermAssoc) *(logicalOp (QueryTerm / QueryTermAssoc)) +QueryTermAssoc = %x28 QueryTerm *(logicalOp QueryTerm) %x29 ; (QueryTerm) +QueryTerm = Attribute +QueryTerm = Attribute Operator ComparableValue +QueryTerm =/ Attribute equal CompEqualityValue +QueryTerm =/ Attribute unequal CompEqualityValue +QueryTerm =/ Attribute patternOp RegExp +QueryTerm =/ Attribute notPatternOp RegExp +Attribute = attrName / compoundAttrName / attrPathName +Operator = equal / unequal / greaterEq / greater / lessEq / less +ComparableValue = Number / quotedStr / dateTime / date / time +OtherValue = false / true +Value = ComparableValue / OtherValue +Range = ComparableValue dots ComparableValue +ValueList = Value 1*(%x2C Value) ; Value 1*(, Value) +CompEqualityValue = OtherValue / ValueList / Range / URI +equal = %x3D %x3D ; == +unequal = %x21 %x3D ; != +greater = %x3E ; > +greaterEq = %x3E %x3D ; >= +less = %x3C ; < +lessEq = %x3C %x3D ; <= +patternOp = %x7E %x3D ; ~= +notPatternOp = %x21 %x7E %x3D ; !~= +dots = %x2E %x2E ; .. +attrNameChar =/ DIGIT / ALPHA +attrNameChar =/ %x5F ; _ +attrName = 1*attrNameChar +attrPathName = attrName *(%x2E attrName) ; attrName *(. attrName) +compoundAttrName = attrName *(%x5B (attrName) %x5D) ; . 
attrName *([ attrName ]) +quotedStr = String ; '*char' +andOp = %x3B ; ; +orOp = %x7C ; | +logicalOp = andOp / orOp +*/ + +--tests +\x +select id, + data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as number_a, + data#>>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as number_b, + jsonb_typeof(data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as number_c, + data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as string_a, + data#>>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as string_b, + jsonb_typeof(data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as string_c, + data#>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as other_a, + data#>>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as other_b, + jsonb_typeof(data#>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as other_c, + data#>'{http://example.org/trueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as true_a, + data#>>'{http://example.org/trueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as true_b, + jsonb_typeof(data#>'{http://example.org/trueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as true_c, + data#>'{http://example.org/falseExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as false_a, + data#>>'{http://example.org/falseExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as false_b, + jsonb_typeof(data#>'{http://example.org/falseExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as false_c, + data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as uri_a, + data#>>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as uri_b, + jsonb_typeof(data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as uri_c, + + data#>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as datetime_a, + data#>>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as datetime_b, + jsonb_typeof(data#>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as datetime_c, + + data#>'{http://example.org/observedAtDateTimeExample,0,https://uri.etsi.org/ngsi-ld/observedAt,0,@value}' as datetimewithobservedat_a, + data#>>'{http://example.org/observedAtDateTimeExample,0,https://uri.etsi.org/ngsi-ld/observedAt,0,@value}' as datetimewithobservedat_b, + jsonb_typeof(data#>'{http://example.org/observedAtDateTimeExample,0,https://uri.etsi.org/ngsi-ld/observedAt,0,@value}') as datetimewithobservedat_c, + + data#>'{http://example.org/dateExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as date_a, + data#>>'{http://example.org/dateExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as date_b, + jsonb_typeof(data#>'{http://example.org/dateExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as date_c, + + data#>'{http://example.org/timeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as time_a, + data#>>'{http://example.org/timeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as time_b, + jsonb_typeof(data#>'{http://example.org/timeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') 
as time_c, + + data#>'{http://example.org/objectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}' as object_a, + data#>>'{http://example.org/objectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}' as object_b, + jsonb_typeof(data#>'{http://example.org/objectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') as object_c, + + data#>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}' as multilevelobject_a, + data#>>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}' as multilevelobject_b, + jsonb_typeof(data#>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') as multilevelobject_c, + + data#>'{http://example.org/relationshipExample,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' as relationship_a, + data#>>'{http://example.org/relationshipExample,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' as relationship_b, + jsonb_typeof(data#>'{http://example.org/relationshipExample,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}') as relationship_c, + + data#>'{http://example.org/arrayExample1,0,https://uri.etsi.org/ngsi-ld/hasValue}' as array1_a, + data#>>'{http://example.org/arrayExample1,0,https://uri.etsi.org/ngsi-ld/hasValue}' as array1_b, + jsonb_typeof(data#>'{http://example.org/arrayExample1,0,https://uri.etsi.org/ngsi-ld/hasValue}') as array1_c, + + data#>'{http://example.org/arrayExample2,0,https://uri.etsi.org/ngsi-ld/hasValue}' as array2_a, + data#>>'{http://example.org/arrayExample2,0,https://uri.etsi.org/ngsi-ld/hasValue}' as array2_b, + jsonb_typeof(data#>'{http://example.org/arrayExample2,0,https://uri.etsi.org/ngsi-ld/hasValue}') as array2_c, + + data#>'{http://example.org/topLevelExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as toplevel_a, + data#>>'{http://example.org/topLevelExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as toplevel_b, + jsonb_typeof(data#>'{http://example.org/topLevelExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as toplevel_c, + + data#>'{http://example.org/topLevelExample,0,http://example.org/subPropertyExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as subprop_a, + data#>>'{http://example.org/topLevelExample,0,http://example.org/subPropertyExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as subprop_b, + jsonb_typeof(data#>'{http://example.org/topLevelExample,0,http://example.org/subPropertyExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') as subprop_c + +from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' ; +\x + +-- common validation: + +-- If the target element is a Property, the target value is defined as the Value associated to such Property. If a Property has +-- multiple instances (identified by its respective datasetId), and no datasetId is explicitly addressed, the target value shall +-- be any Value of such instances. + +-- If the target element is a Relationship, the target object is defined as the object associated (represented as a URI) to +-- such Relationship. If a Relationship has multiple instances (identified by its respective datasetId), and no datasetId is +-- explicitly addressed, the target object shall be any object of such instances + +-- If the target element corresponds to a Relationship, the combination of such target element with any operator different +-- than equal or unequal shall result in not matching. + + +-- This dynamic function approach DOES NOT WORK! 
Solution: UNION ALL or OR (see below) +-- CREATE OR REPLACE FUNCTION f_get_attr_field(text) RETURNS text +-- AS 'select case when $1 in (''Property'', ''GeoProperty'') then ''value'' +-- when $1 = ''Relationship'' then ''object'' +-- else null end ' +-- LANGUAGE SQL +-- IMMUTABLE +-- RETURNS NULL ON NULL INPUT; + +-- q=attr1==urn:ngsi-ld:test +-- select data +-- from entity +-- where data#>>'{attr1,' || 'test' || '}' = 'urn:ngsi-ld:test'; !!! DID NOT WORK! jsonb operators do not support concatenation in the path +-- where data#>>'{attr1,' || f_get_attr_field(data#>>'{attr1,type}') || '}' = 'urn:ngsi-ld:test'; !!! DID NOT WORK! + +\echo Testing ngsi-ld types +select id, + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr1_value, + data#>>'{http://example.org/attr2,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as attr2_value, + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' as attr1_object, + data#>>'{http://example.org/attr2,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' as attr2_object + from entity + where id like 'urn:ngsi-ld:Test:entity%'; + +\echo q=attr1==urn:ngsi-ld:test +select data + from entity + where (data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = 'urn:ngsi-ld:test') OR + (data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Relationship"]}]}' and + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' = 'urn:ngsi-ld:test'); +\echo solution using union, same result +select data + from entity + where data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = 'urn:ngsi-ld:test' +union +select data + from entity + where data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Relationship"]}]}' and + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' = 'urn:ngsi-ld:test'; + +\echo q=attr1!=urn:ngsi-ld:testx +select data + from entity + where data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' <> 'urn:ngsi-ld:testx' +union +select data + from entity + where data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Relationship"]}]}' and + data#>>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' <> 'urn:ngsi-ld:testx'; + +\echo q=attr2!=urn:ngsi-ld:testx +select data + from entity + where data@>'{"http://example.org/attr2":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/attr2,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' <> 'urn:ngsi-ld:testx' +union +select data + from entity + where data@>'{"http://example.org/attr2":[{"@type":["https://uri.etsi.org/ngsi-ld/Relationship"]}]}' and + data#>>'{http://example.org/attr2,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' <> 'urn:ngsi-ld:testx'; + +\echo q=attr1>7 +select data + from entity + where data@>'{"http://example.org/attr1":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/attr1,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' > '7'::jsonb; +-- as the operator is neither equal nor unequal, "union all + extra sql for object" is not 
required here + +-- attrPathName +\echo q=topLevelExample.subPropertyExample>4 +select id, data#>'{http://example.org/topLevelExample}' + from entity + where data@>'{"http://example.org/topLevelExample":[{"http://example.org/subPropertyExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}]}' and + data#>'{http://example.org/topLevelExample,0,http://example.org/subPropertyExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' > '4'::jsonb; + + +-- compoundAttrName +\echo q=objectExample[postalCode]==42000 +/* +Lastly, implementations shall support queries involving specific data subitems belonging to a Property Value (seed +target value) represented by a JSON object structure (complex value). For that purpose, an attribute path may contain a +trailing path (production rule named compoundAttrName) composed of a concatenated list of JSON member names, +each one enclosed in between square brackets, and intended to address a specific data subitem (member) within the seed +target value. When such a trailing path is present, implementations shall interpret and evaluate it (against the seed +target value) as a MemberExpression of Ecma 262 [21]. If the evaluation of such MemberExpression does not result in a +defined value, the target element shall be considered as non-existent for the purpose of query resolution. +EXAMPLE 9: address[addressLocality]== "Berlin". The trailing path is [addressLocality] and is used to refer to a +particular subitem within a Postal Address. + +data#>'{http://example.org/objectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}' + { + "https://example.org/postalCode": [ + { + "@value": "42000" + } + ], + "https://example.org/addressRegion": [ + { + "@value": "Metropolis" + } + ], + ... + } +*/ +select data + from entity + where data@>'{"http://example.org/objectExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/objectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,https://example.org/postalCode,0,@value}' = '42000'::jsonb; + +\echo q=multiLevelObjectExample[streetAddress][streetName]=="Main Street" +/* +data#>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0}' + { + "https://example.org/streetAddress": [ + { + "https://example.org/streetName": [ + { + "@value": "Main Street" + } + ], + "https://example.org/houseNumber": [ + { + "@value": 65 + } + ] + } + ] + } +*/ +select data + from entity + where data@>'{"http://example.org/multiLevelObjectExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,https://example.org/streetAddress,0,https://example.org/streetName,0,@value}' = '"Main Street"'::jsonb; + + +-- case 1 +-- QueryTerm = Attribute Operator ComparableValue + +-- NOTE: we cannot use "#>>" (as text) for basic operations, because great/less operators will not work with number types. +-- possible solution would be to typecast. is it worthy? (please consider the URI problem below) +-- example: data#>>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' > '99' -- DOES NOT WORK! 
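+-- Illustrative sketch (not part of the original test set): text extracted with "#>>"
+-- compares lexicographically, while "#>" keeps the jsonb number type and compares numerically.
+-- The two-column result below is expected to be (false, true), which is why the cases that
+-- follow keep the "#>" / '::jsonb' form for >, >=, < and <=.
+select '100' > '99'              as text_comparison,   -- lexicographic: '1' < '9', so false
+       '100'::jsonb > '99'::jsonb as jsonb_comparison;  -- numeric comparison within jsonb, so true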
+ +\echo case 1.1 +\echo Attribute = attrName +\echo Operator = equal / unequal / greater / greaterEq / less / lessEq + +\echo case 1.1.1 +\echo ComparableValue = Number +\echo q=numberExample==100 +select id, data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/numberExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = '100'::jsonb; + --data@>'{"http://example.org/numberExample":[{"https://uri.etsi.org/ngsi-ld/hasValue":[{"@value":100}]}]}'; + +\echo q=numberExample>99 +select id, data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/numberExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' > '99'::jsonb; + +\echo case 1.1.2 +\echo ComparableValue = quotedStr +\echo q=stringExample=="Mercedes" +select id, data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/stringExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = '"Mercedes"'::jsonb; +\echo q=stringExample!="Mercedes" +select id, data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/stringExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' <> '"Mercedes"'::jsonb; + +-- spec, page 35: When comparing dates or times, the order relation considered shall be a temporal one. +\echo case 1.1.3 +\echo ComparableValue = dateTime +\echo q=dateTimeExample>=2018-12-04T12:00:00.00000Z +select id, data#>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/dateTimeExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@type}' = 'https://uri.etsi.org/ngsi-ld/DateTime' and + --data@>'{"http://example.org/dateTimeExample":[{"https://uri.etsi.org/ngsi-ld/hasValue":[{"@type":"https://uri.etsi.org/ngsi-ld/DateTime"}]}]}' and + --data#>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' >= '"2018-12-04T12:00:00.00001Z"'::jsonb; -- wrong result, must cast + (data#>>'{http://example.org/dateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::timestamp >= '2018-12-04T12:00:00.00000Z'::timestamp; + +-- !!! createdAt/modifiedAt/observedAt do not have the "hasValue" element! 
+\echo q=observedAtDateTimeExample.observedAt==2018-12-04T12:00:00Z +select id, data#>'{http://example.org/observedAtDateTimeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/observedAtDateTimeExample":[{"https://uri.etsi.org/ngsi-ld/observedAt":[{"@type":"https://uri.etsi.org/ngsi-ld/DateTime"}]}]}' and + -- data#>>'{http://example.org/observedAtDateTimeExample,0,https://uri.etsi.org/ngsi-ld/observedAt,0,@type}' = 'https://uri.etsi.org/ngsi-ld/DateTime' and + (data#>>'{http://example.org/observedAtDateTimeExample,0,https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::timestamp >= '2018-12-04T12:00:00Z'::timestamp; + +\echo case 1.1.4 +\echo ComparableValue = date +\echo q=dateExample>=2018-12-04 +select id, data#>'{http://example.org/dateExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/dateExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/dateExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@type}' = 'https://uri.etsi.org/ngsi-ld/Date' and + (data#>>'{http://example.org/dateExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::date >= '2018-12-04'::date; + +\echo case 1.1.5 +\echo ComparableValue = time +\echo q=timeExample>=12:00:00.00000Z +select id, data#>'{http://example.org/timeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/timeExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/timeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@type}' = 'https://uri.etsi.org/ngsi-ld/Time' and + (data#>>'{http://example.org/timeExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::time >= '12:00:00.00000Z'::time; + + +\echo case 2 +\echo QueryTerm = Attribute equal CompEqualityValue +\echo QueryTerm = Attribute unequal CompEqualityValue + +\echo case 2.1 +\echo CompEqualityValue = OtherValue + +\echo case 2.1.1 +\echo OtherValue = false +\echo q=falseValue==false +select id, data#>'{http://example.org/falseExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/falseExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/falseExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = 'false'::jsonb; + +\echo case 2.1.2 +\echo OtherValue = true +\echo q=trueValue==true +select id, data#>'{http://example.org/trueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/trueExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/trueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = 'true'::jsonb; + +\echo case 2.2 +\echo CompEqualityValue = ValueList +\echo ValueList = Value 1*(%x2C Value) ; Value 1*(, Value) +\echo Value = ComparableValue / OtherValue +\echo q=stringExample=="Mercedes",false,true +select id, data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/stringExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + 
data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' in ('"Mercedes"'::jsonb, '100'::jsonb, 'false'::jsonb, 'true'::jsonb); +-- equal = "in" +-- unequal = "not in" + +\echo case 2.2.1 +\echo ComparableValue = Number +\echo q=numberExample==100,101 +select id, data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/numberExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' in ('100'::jsonb, '101'::jsonb); + +\echo case 2.2.2 +\echo ComparableValue = quotedStr +\echo q=stringExample=="Mercedes","BMW" +select id, data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/stringExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' in ('"Mercedes"'::jsonb, '"BMW"'::jsonb); + +\echo case 2.2.3 +\echo OtherValue = false +\echo OtherValue = true +\echo q=otherValueExample==false,true +select id, data#>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/otherValueExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' in ('false'::jsonb, 'true'::jsonb); + +\echo case 2.3 +\echo CompEqualityValue = Range +\echo Range = ComparableValue dots ComparableValue +\echo dots = %x2E %x2E ; .. 
+ +-- equal = "between" +-- unequal = "not between" + +\echo case 2.3.1 +\echo ComparableValue = Number +\echo q=numberExample==99..102 +select id, data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/numberExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' between '99'::jsonb and '102'::jsonb; + +\echo case 2.3.2 +\echo ComparableValue = quotedStr +\echo q=stringExample=="BMW".."Volkswagen" +select id, data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/stringExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' between '"BMW"'::jsonb and '"Volkswagen"'::jsonb; + + +\echo case 2.4 +\echo CompEqualityValue = URI +\echo q=uriExample==http://www.example.com +\echo q=uriExample==urn:ngsi-ld:relationshipExample (relationship object) + +-- we need to detect beforehand whether the value is an URI or not, and then +-- either use the #>> operator or insert double quotes to the value (adopted solution) +select id, data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/uriExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + -- two alternatives: + data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = '"http://www.example.com"'::jsonb; + --data#>>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = 'http://www.example.com' +union all +select id, data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/uriExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Relationship"]}]}' and + -- two alternatives: + data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' = '"http://www.example.com"'::jsonb; + --data#>>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasObject,0,@id}' = 'http://www.example.com'; + + +\echo case 3 +\echo QueryTerm = Attribute patternOp RegExp +\echo q=stringExample~=Mer.* +\echo q=uriExample~=http.* +\echo q=numberExample~=1.* +\echo q=otherValueExample~=tr.* +-- spec defines: "If the target value data type is different than String then it shall be considered as not matching" for patternOp. 
+-- always use "#>>" operator + +select id, data#>>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/stringExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + jsonb_typeof(data#>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') = 'string' and + data#>>'{http://example.org/stringExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ~ 'Mer.*'; + +select id, data#>>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/uriExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + jsonb_typeof(data#>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') = 'string' and + data#>>'{http://example.org/uriExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ~ 'http.*'; + +select id, data#>>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/numberExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + jsonb_typeof(data#>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') = 'string' and + data#>>'{http://example.org/numberExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ~ '1.*'; + +select id, data#>>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/otherValueExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + jsonb_typeof(data#>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}') = 'string' and + data#>>'{http://example.org/otherValueExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ~ 'tr.*'; + +\echo mixing object and patternOp +\echo q=multiLevelObjectExample[streetAddress][streetName]~="Main.*" +select id, data from entity + where id = 'urn:ngsi-ld:Test:all_datatypes' and + data@>'{"http://example.org/multiLevelObjectExample":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,https://example.org/streetAddress,0,https://example.org/streetName,0,@value}' ~ 'Main.*' and + jsonb_typeof(data#>'{http://example.org/multiLevelObjectExample,0,https://uri.etsi.org/ngsi-ld/hasValue,0,https://example.org/streetAddress,0,https://example.org/streetName,0,@value}') = 'string'; diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/query-examples.sql b/scorpio-broker/Storage/StorageManager/db-scripts/query-examples.sql new file mode 100644 index 0000000000000000000000000000000000000000..2182dda5e4da43c2d69ba96672047119ff25da5b --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/query-examples.sql @@ -0,0 +1,133 @@ +\echo old version... not in expanded form. exiting... 
+\q + +\pset pager 0 + +\echo Query entity urn:ngsi-ld:Vehicle:A4567 +select jsonb_pretty(data) + from entity +where id = 'urn:ngsi-ld:Vehicle:A4567'; + +\echo again, using jsonb filter +select data, data->'id' + from entity + where data @> '{"id": "urn:ngsi-ld:Vehicle:A4567"}'; -- fastest way, use indexes +-- other ways to do the same filter, but "@>" is the only that use GIN indexes +-- where data->'id' = '"urn:ngsi-ld:Vehicle:A4567"'; +-- where data->>'id' = 'urn:ngsi-ld:Vehicle:A4567'; + +\echo Query NGSI-LD pre-defined members +select id, type, createdAt, modifiedAt, observedAt, location, ST_AsText(location) as location_text + from entity +limit 3; + +\echo Retrieving all entities whose the property availableSpotNumber is greater or equals (>=) to 100 +select data, (data#>>'{availableSpotNumber,value}')::numeric as spot_numeric_value, data#>'{availableSpotNumber,value}' as spot_jsonb_value + from entity + where data#>'{availableSpotNumber,value}' >= '100'::jsonb; +-- other ways of doing the same filter, check which one is faster +-- where (data#>>'{availableSpotNumber,value}')::numeric >= 100; +-- where data->'availableSpotNumber'->'value' >= '100'::jsonb; + +\echo Test Geolocation +\echo First geofence test... get every entity within the radius of 50 meters from point (0,0) +select data, + ST_AsText( location ) as location_geovalue_text + from entity + WHERE + ST_DWithin( location, + ST_GeomFromText('POINT(0 0)', 0), + 50 + ); + +\echo Second geofence test... get every entity within the radius of 2 meters from point (0,0) +select data, + ST_AsText( location ) as location_geovalue_text + from entity + WHERE + ST_DWithin( location, + ST_GeomFromText('POINT(0 0)', 0), + 2 + ); + + +\echo Test geolocation reading data from JSONB, using property name "anotherLocation" +select data, + ST_AsText( ST_GeomFromGeoJSON( data#>'{anotherLocation}'#>>'{value}') ) as geovalue_text + from entity + WHERE + data#>>'{anotherLocation,type}' = 'GeoProperty' and + ST_DWithin( ST_GeomFromGeoJSON( data#>>'{anotherLocation,value}'), + ST_GeomFromText('POINT(0 0)', 0), + 50 + ); + + +\echo Test multi-values +\echo Query tirePressure +select id, data->'tirePressure' +from entity + where id = 'urn:ngsi-ld:Vehicle:A9999'; + +\echo Complex querying multi-values +select e.id, e.data->'tirePressure'->0->'value' as tirepressure_0, t.value->'id' as tire_id, t.value->'value' as tire_value +from entity e + cross join jsonb_array_elements(data->'tirePressure') t + where id = 'urn:ngsi-ld:Vehicle:A9999' and + t.value->'value' > '6'::jsonb ; + + +\echo Test historical data +delete from entity_history; + +\echo Update tire1 pressure from 5 to 9 +update entity set data = '{ + "id":"urn:ngsi-ld:Vehicle:A9999", + "type":"Vehicle", + "brandName":{ + "type":"Property", + "value":"Mercedes" + }, + "tirePressure": [{ + "id": "tire1", + "type":"Property", + "value": 9 + }, + { + "id": "tire2", + "type":"Property", + "value": 7 + }], + "isParked":{ + "type":"Relationship", + "object":"urn:ngsi-ld:OffStreetParking:Downtown1", + "observedAt":"2017-07-29T12:00:04", + "providedBy":{ + "type":"Relationship", + "object":"urn:ngsi-ld:Person:Bob" + } + }, + "createdAt":{ + "type":"Property", + "value":"2018-06-21T12:00:04" + }, + "modifiedAt":{ + "type":"Property", + "value":"2018-06-21T12:00:04" + }, + "observedAt":{ + "type":"Property", + "value":"2018-06-20T11:59:54" + } +}' +where id = 'urn:ngsi-ld:Vehicle:A9999'; + +\echo Query tirePressure again +select id, data->'tirePressure' +from entity + where id = 'urn:ngsi-ld:Vehicle:A9999'; + 
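+-- Note (assumption, not shown in this script): the entity_history rows queried below are
+-- expected to be maintained automatically on UPDATE/DELETE of entity. The sys_period column
+-- suggests a temporal_tables-style versioning setup; a hypothetical sketch of such a trigger,
+-- kept commented out here so the script behaviour is unchanged, would look like:
+-- CREATE TRIGGER entity_versioning
+--   BEFORE INSERT OR UPDATE OR DELETE ON entity
+--   FOR EACH ROW EXECUTE PROCEDURE versioning('sys_period', 'entity_history', true);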
+\echo Query history +select id, modifiedat, data->'tirePressure', sys_period from entity_history + where id = 'urn:ngsi-ld:Vehicle:A9999' ; + diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/simple-vehicle.sql b/scorpio-broker/Storage/StorageManager/db-scripts/simple-vehicle.sql new file mode 100644 index 0000000000000000000000000000000000000000..a23f351bd9c809ab03982f2bbf421e4bfa9e6fb0 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/simple-vehicle.sql @@ -0,0 +1,69 @@ +insert into Entity (id, data) values ('urn:ngsi-ld:Vehicle:V1-Heidelberg', +' +{ + "@id": "urn:ngsi-ld:Vehicle:V1-Heidelberg", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 90 + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ 8.684783577919006, 49.406131991436396 ] }" + } + ] + } + ] +} +' +); + +insert into Entity (id, data) values ('urn:ngsi-ld:Vehicle:V2-Worms', +' +{ + "@id": "urn:ngsi-ld:Vehicle:V2-Worms", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 100 + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ 8.367462158203125, 49.62761437887251 ] }" + } + ] + } + ] + } +' +); \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/test-performance-insert.sql b/scorpio-broker/Storage/StorageManager/db-scripts/test-performance-insert.sql new file mode 100644 index 0000000000000000000000000000000000000000..aa6ab066c5127f3177ca9737d66980dc6e06b5d1 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/test-performance-insert.sql @@ -0,0 +1,282 @@ +\pset pager 0 +\timing + + + +\echo Cleaning benchmark data... +-- delete from entity where id like 'urn:ngsi-ld:Vehicle:AP%'; +truncate entity; + +\echo Inserting data... 
+INSERT INTO entity (id, data) + SELECT +'urn:ngsi-ld:Vehicle:AP' || i, +('{ + "@id": "urn:ngsi-ld:Vehicle:AP' || i || '", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "http://example.org/vehicle/brandName": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + "http://example.org/common/isParked": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:OffStreetParking:Downtown' || i* trunc(100*random()) || '" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04Z" + } + ], + "http://example.org/common/providedBy": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:Person:Bob' || i* trunc(1000*random()) || '" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue":[ + { + "@value": + "{ \"type\":\"Point\", \"coordinates\":[ -' || round( (10 * random())::numeric , 1) || ', ' || round( (10 * random())::numeric , 1) || ' ] }" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "' || to_char(current_timestamp, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "' || to_char(current_timestamp + interval '2 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '" + } + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": ' || trunc(100*random()) || ' + } + ] + } + ] + }')::jsonb + FROM generate_series(1, 99994) i; + + +\echo Insert four specific records... 
(distinct speed and location) +INSERT INTO entity (id, data) + SELECT +'urn:ngsi-ld:Vehicle:AP' || i, +('{ + "@id": "urn:ngsi-ld:Vehicle:AP' || i || '", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "http://example.org/vehicle/brandName": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + "http://example.org/common/isParked": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:OffStreetParking:Downtown' || i* trunc(1000*random()) || '" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04Z" + } + ], + "http://example.org/common/providedBy": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:Person:Bob' || i* trunc(1000*random()) || '" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue":[ + { + "@value": + "{ \"type\":\"Point\", \"coordinates\":[ -' || round( random()::numeric , 1) || ', ' || round( random()::numeric , 1) || ' ] }" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "' || to_char(current_timestamp, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "' || to_char(current_timestamp + interval '2 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '" + } + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": ' || trunc(1000*random()) || ' + } + ] + } + ] + }')::jsonb + FROM generate_series(99995, 99999) i; + +\echo Inserting data... 
+INSERT INTO entity (id, data) + SELECT +'urn:ngsi-ld:Vehicle:AP' || i, +('{ + "@id": "urn:ngsi-ld:Vehicle:AP' || i || '", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "http://example.org/vehicle/brandName": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + "http://example.org/common/isParked": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:OffStreetParking:Downtown' || i* trunc(100*random()) || '" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04Z" + } + ], + "http://example.org/common/providedBy": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:Person:Bob' || i* trunc(1000*random()) || '" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue":[ + { + "@value": + "{ \"type\":\"Point\", \"coordinates\":[ -' || round( (10 * random())::numeric , 1) || ', ' || round( (10 * random())::numeric , 1) || ' ] }" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "' || to_char(current_timestamp, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '" + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "' || to_char(current_timestamp + interval '2 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '" + } + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": ' || trunc(100*random()) || ' + } + ] + } + ] + }')::jsonb + FROM generate_series(100000, 999999) i; diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/test-performance-query.sql b/scorpio-broker/Storage/StorageManager/db-scripts/test-performance-query.sql new file mode 100644 index 0000000000000000000000000000000000000000..65459a7f74840ddff497f330cd5613d09d77485b --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/test-performance-query.sql @@ -0,0 +1,234 @@ +\pset pager 0 +\timing + +\echo Query entity urn:ngsi-ld:Vehicle:AP12345 +select jsonb_pretty(data) + from entity +where id = 'urn:ngsi-ld:Vehicle:AP12345'; + +\echo Query entity urn:ngsi-ld:Vehicle:AP99997 using a JSONB filter operator +--explain analyze +select data + from entity + where data @> '{"@id": "urn:ngsi-ld:Vehicle:AP99997"}'; -- 5ms. Bitmap Index Scan on i_entity_data (cost=0.00..51.73 rows=498 width=0) (actual time=0.117..0.117 rows=1 loops=1) +-- many ways to do the same filter. to check which one is faster (and uses GIN indexes) +-- where data->'@id' = '"urn:ngsi-ld:Vehicle:AP99997"'; -- 1096ms. Parallel Seq Scan on entity (cost=0.00..124240.91 rows=1038 width=766) (actual time=1060.945..1078.399 rows=0 loops=3) +-- where data->>'@id' = 'urn:ngsi-ld:Vehicle:AP999997'; -- 388ms (already cached). Parallel Seq Scan on entity (cost=0.00..124240.91 rows=1038 width=766) (actual time=377.601..381.259 rows=0 loops=3) +-- where data#>>'{@id}' = 'urn:ngsi-ld:Vehicle:AP999997'; -- 384ms (already cached). 
Parallel Seq Scan on entity (cost=0.00..124240.91 rows=1038 width=766) (actual time=359.061..377.245 rows=0 loops=3) + +\echo Query entity using a JSONB filter operator in a property of property +explain analyze +select data + from entity + where data@>'{"http://example.org/common/isParked": [ {"http://example.org/common/providedBy": [ { "https://uri.etsi.org/ngsi-ld/hasObject": [ {"@id": "urn:ngsi-ld:Person:Bob234555"}] }] } ] }'; -- 1ms. bitmap heap scan + bitmap index scan +-- many ways to do the same filter. to check which one is faster (and uses GIN indexes) +-- where data#>'{http://example.org/common/isParked,0,http://example.org/common/providedBy,0,https://uri.etsi.org/ngsi-ld/hasObject,0,"@id"}' = '"urn:ngsi-ld:Person:Bob573426"'; -- 2160ms. Parallel Seq Scan on entity (cost=0.00..128163.63 rows=1057 width=740) (actual time=2152.319..2152.319 rows=0 loops=3) +-- where data->'http://example.org/common/isParked'->0->'http://example.org/common/providedBy'->0->'https://uri.etsi.org/ngsi-ld/hasObject'->0->'@id' = '"urn:ngsi-ld:Person:Bob573426"'; -- 2161ms. Parallel Seq Scan on entity (cost=0.00..131335.26 rows=1057 width=740) (actual time=2151.097..2151.097 rows=0 loops=3) + +\echo Query NGSI-LD pre-defined members +select id, type, createdAt, modifiedAt, location, ST_AsText(location) as location_text + from entity +limit 1; + +\echo Retrieving all entities whose the speed is greater or equals (>=) to 400 +--explain analyze +select data, (data#>>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::numeric as speed_numeric_value, data#>'{speed,value}' as speed_jsonb_value + from entity + where data@>'{"http://example.org/vehicle/speed":[{"@type":["https://uri.etsi.org/ngsi-ld/Property"]}]}' and + data#>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' >= '400'::jsonb; -- 2700ms. Seq Scan on entity (cost=0.00..133886.39 rows=161571 width=841) (actual time=666.328..969.293 rows=3 loops=1) / Filter: ((data #> '{speed,value}'::text[]) >= '400'::jsonb) +-- other ways of doing the same filter, check which one is faster +-- where (data#>>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::numeric >= 400; -- 2991ms. Seq Scan on entity (cost=0.00..136309.95 rows=161571 width=841) (actual time=780.223..1162.166 rows=3 loops=1)/ Filter: (((data #>> '{speed,value}'::text[]))::numeric >= '400'::numeric) +-- where data->'http://example.org/vehicle/speed'->0->'https://uri.etsi.org/ngsi-ld/hasValue'->0->'@value' >= '400'::jsonb; -- 2623. Seq Scan on entity ...... +-- where data @> '{"http://example.org/vehicle/speed": {}}' and data#>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' >= '400'::jsonb; -- 2696ms. bitmap heap sacn + filter + bitmap heap scan +-- where data ? 'http://example.org/vehicle/speed' and data#>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' >= '400'::jsonb; -- 4674 ms. bitmap heap + filter + bitmap heap scan + +\echo Test using subquery +--explain analyze +select data, (data#>>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::numeric as speed_numeric_value, data#>'{speed,value}' as speed_jsonb_value + from (select data from entity where data ? 'http://example.org/vehicle/speed') as e + where data#>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' >= '400'::jsonb; + -- 4520ms. ... 
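+-- Note on these timings (assumption: the index DDL is not part of this script): the plans
+-- mention "Bitmap Index Scan on i_entity_data", i.e. a GIN index over the jsonb column such as
+--   CREATE INDEX IF NOT EXISTS i_entity_data ON entity USING gin (data);
+-- With the default jsonb_ops operator class only @> (plus ?, ?| and ?&) can use that index,
+-- which is why the containment-based (@>) filters in this script stay in the millisecond range
+-- while the equivalent #> / -> / #>> variants fall back to sequential scans.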
+ +\echo Retrieving all entities whose the speed equals to 505 +--explain analyze +select data, (data#>>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::numeric as speed_numeric_value, + data#>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' as speed_jsonb_value + from entity +-- where data#>'{http://example.org/vehicle/speed,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' = '505'::jsonb; -- 1615ms. parallel seq scan + where data @> '{"http://example.org/vehicle/speed": [ {"https://uri.etsi.org/ngsi-ld/hasValue": [ { "@value": 505 }]}]}'; -- 0.6ms. bitmap heap scan + Bitmap Index Scan on i_entity_data + + +\echo Test Geolocation +\echo First geofence test... get every entity within the radius of 50 meters from point (0,0) +select id, + ST_AsText( location ) as location_geovalue_text + from entity + WHERE + ST_DWithin( location::geography, + ST_GeomFromText('POINT(0 0)', 0)::geography, + 50 + ) + limit 5; +-- 0.5ms + +\echo Testing also the jsonb type (this is needed even for the "location" field, because of null values) +select id, + ST_AsText( location ) as location_geovalue_text + from entity + WHERE + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_DWithin( location::geography, + ST_GeomFromText('POINT(0 0)', 0)::geography, + 50 + ) + limit 5; +-- 238ms + + +\echo Second geofence test... get every entity within the radius of 1 meters from point (0,0) +select id, -- data, + ST_AsText( location ) as location_geovalue_text + from entity + WHERE + ST_DWithin( location::geography, + ST_GeomFromText('POINT(0 0)', 0)::geography, + 1 + ) + limit 5; +-- 0.8ms + +\echo Test geolocation reading data from JSONB column +select id, -- data, + ST_AsText( ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' )) as geovalue_text + from entity + WHERE + data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }' and + ST_DWithin( ST_GeomFromGeoJSON( data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}')::geography, + ST_GeomFromText('POINT(0 0)', 0)::geography, + 50 + ) + limit 5; +-- 1243ms + + + +\echo Update speed to 1500 from entity urn:ngsi-ld:Vehicle:AP12347 +update entity set data = +'{ + "@id": "urn:ngsi-ld:Vehicle:AP12347", + "@type": [ + "http://example.org/vehicle/Vehicle" + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 1500 + } + ] + } + ], + "http://example.org/common/isParked": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ], + "http://example.org/common/providedBy": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ], + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:Person:Bob10111374" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:OffStreetParking:Downtown543224" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04" + } + ] + } + ], + "http://example.org/vehicle/brandName": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + 
"https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": + "{ \"type\":\"Point\", \"coordinates\":[ -2786.6, 1748.9 ] }" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "2018-10-04T14:31:20" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "2018-10-04T16:31:20" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "2018-06-20T11:59:54" + } + ] + } + ] +} +' +where id = 'urn:ngsi-ld:Vehicle:AP12347'; +-- 8ms + +\echo Query again +select id, data->'http://example.org/vehicle/speed' +from entity + where id = 'urn:ngsi-ld:Vehicle:AP12347'; + diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/vehicle-context-expanded.jsonld b/scorpio-broker/Storage/StorageManager/db-scripts/vehicle-context-expanded.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..de32c8f211c4edb348a1897834c78dc006588881 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/vehicle-context-expanded.jsonld @@ -0,0 +1,84 @@ +{ + "http://example.org/vehicle/brandName": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "Mercedes" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/createdAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04Z" + } + ], + "@id": "urn:ngsi-ld:Vehicle:A4567", + "http://example.org/common/isParked": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:OffStreetParking:Downtown1" + } + ], + "https://uri.etsi.org/ngsi-ld/observedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04Z" + } + ], + "http://example.org/common/providedBy": [ + { + "https://uri.etsi.org/ngsi-ld/hasObject": [ + { + "@id": "urn:ngsi-ld:Person:Bob" + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "@type": [ + "https://uri.etsi.org/ngsi-ld/Relationship" + ] + } + ], + "https://uri.etsi.org/ngsi-ld/location": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/GeoProperty" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": "{ \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }" + } + ] + } + ], + "https://uri.etsi.org/ngsi-ld/modifiedAt": [ + { + "@type": "https://uri.etsi.org/ngsi-ld/DateTime", + "@value": "2017-07-29T12:00:04Z" + } + ], + "http://example.org/vehicle/speed": [ + { + "@type": [ + "https://uri.etsi.org/ngsi-ld/Property" + ], + "https://uri.etsi.org/ngsi-ld/hasValue": [ + { + "@value": 80 + } + ] + } + ], + "@type": [ + "http://example.org/vehicle/Vehicle" + ] +} \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/db-scripts/vehicle.jsonld b/scorpio-broker/Storage/StorageManager/db-scripts/vehicle.jsonld new file mode 100644 index 0000000000000000000000000000000000000000..9829b29283fce5c2f5545eab821a6efd05f2c56b --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/db-scripts/vehicle.jsonld @@ -0,0 +1,43 @@ +{ + "@context":[ + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + 
{ + "Vehicle":"http://example.org/vehicle/Vehicle", + "brandName":"http://example.org/vehicle/brandName", + "speed":"http://example.org/vehicle/speed", + "isParked":{ + "@type":"@id", + "@id":"http://example.org/common/isParked" + }, + "providedBy": { + "@type": "@id", + "@id": "http://example.org/common/providedBy" + } + } + ], + "id":"urn:ngsi-ld:Vehicle:A4567", + "type":"Vehicle", + "brandName":{ + "type":"Property", + "value":"Mercedes" + }, + "isParked":{ + "type":"Relationship", + "object":"urn:ngsi-ld:OffStreetParking:Downtown1", + "observedAt":"2017-07-29T12:00:04Z", + "providedBy":{ + "type":"Relationship", + "object":"urn:ngsi-ld:Person:Bob" + } + }, + "speed":{ + "type":"Property", + "value":80 + }, + "createdAt":"2017-07-29T12:00:04Z", + "modifiedAt":"2017-07-29T12:00:04Z", + "location":{ + "type":"GeoProperty", + "value":"{ \"type\":\"Point\", \"coordinates\":[ -8.5, 41.2 ] }" + } +} \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/dockerfile4maven b/scorpio-broker/Storage/StorageManager/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..8a9cad9642bdedb91f078bb58edd160842fd8ec8 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/dockerfile4maven @@ -0,0 +1,13 @@ +FROM openjdk:22-ea-21-jdk-slim + +WORKDIR /usr/src/scorpio +ARG JAR_FILE_BUILD +ARG JAR_FILE_RUN +ENV JAR_FILE_RUN ${JAR_FILE_RUN} + +COPY target/${JAR_FILE_BUILD} ./${JAR_FILE_RUN} +COPY src/main/resources/application-dist.yml ./config/application.yml + +ENV spring_args "" + +CMD java -jar $JAR_FILE_RUN ${spring_args} diff --git a/scorpio-broker/Storage/StorageManager/pom.xml b/scorpio-broker/Storage/StorageManager/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..e155653b79a33d1c1a320f0fc65e5d791de4766a --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/pom.xml @@ -0,0 +1,69 @@ + + 4.0.0 + StorageManager + jar + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../../BrokerParent + + 1.0.0-SNAPSHOT + + + docker + + + + com.spotify + dockerfile-maven-plugin + 1.4.12 + + + default + + build + push + + + + + dockerfile4maven + scorpiobroker/scorpio + ${project.artifactId}_${project.version} + + ${project.build.finalName}.jar + ${project.artifactId}.jar + + + + + + + + + + + org.springframework.boot + spring-boot-starter-jdbc + + + org.springframework.boot + spring-boot-starter-logging + + + + + org.flywaydb + flyway-core + + + org.postgresql + postgresql + + + + diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/StorageManager.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/StorageManager.java new file mode 100644 index 0000000000000000000000000000000000000000..c1e80a516eb995eaa90f0bd5480f366dd0145d1d --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/StorageManager.java @@ -0,0 +1,30 @@ +package eu.neclab.ngsildbroker.storagemanager; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.Import; + +import eu.neclab.ngsildbroker.commons.stream.service.KafkaConfig; +import eu.neclab.ngsildbroker.storagemanager.config.JdbcConfig; +import eu.neclab.ngsildbroker.storagemanager.services.StorageWriterService; +import 
eu.neclab.ngsildbroker.storagemanager.services.StorageReaderService; + +@SpringBootApplication +@Import(KafkaConfig.class) +public class StorageManager { + + @Autowired + JdbcConfig jdbcConfig; + + @Autowired(required=false) + StorageWriterService storageWriterService; + + @Autowired(required=false) + StorageReaderService storageReaderService; + + public static void main(String[] args) { + SpringApplication.run(StorageManager.class,args); + } + +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/config/JdbcConfig.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/config/JdbcConfig.java new file mode 100644 index 0000000000000000000000000000000000000000..d06bb0cd7be26afa55963637193cb51a0b967cf2 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/config/JdbcConfig.java @@ -0,0 +1,59 @@ +package eu.neclab.ngsildbroker.storagemanager.config; + + +import javax.sql.DataSource; + +import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Primary; +import org.springframework.jdbc.core.JdbcTemplate; + +import com.zaxxer.hikari.HikariDataSource; + +@Configuration +public class JdbcConfig { + + // https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#howto-two-datasources + + @Bean + @Primary + @ConfigurationProperties(prefix = "reader.datasource") + public DataSourceProperties getReaderDataSourceProperties() { + return new DataSourceProperties(); + } + + @Bean(name = "readerDataSource") + @Primary + @ConfigurationProperties(prefix = "reader.datasource.hikari") + public DataSource getReaderDataSource() { + return getReaderDataSourceProperties().initializeDataSourceBuilder() + .type(HikariDataSource.class).build(); + } + + @Bean(name = "readerJdbcTemplate") + public JdbcTemplate getReaderJdbcTemplate() { + return new JdbcTemplate(getReaderDataSource()); + } + + + @Bean + @ConfigurationProperties(prefix = "writer.datasource") + public DataSourceProperties getWriterDataSourceProperties() { + return new DataSourceProperties(); + } + + @Bean(name = "writerDataSource") + @ConfigurationProperties(prefix = "writer.datasource.hikari") + public DataSource getWriterDataSource() { + return getWriterDataSourceProperties().initializeDataSourceBuilder() + .type(HikariDataSource.class).build(); + } + + @Bean(name = "writerJdbcTemplate") + public JdbcTemplate getWriterJdbcTemplate() { + return new JdbcTemplate(getWriterDataSource()); + } + +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/controller/InfoController.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/controller/InfoController.java new file mode 100644 index 0000000000000000000000000000000000000000..8e6faea1f00606c07730eccc8b10f5514ec6950b --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/controller/InfoController.java @@ -0,0 +1,159 @@ +package eu.neclab.ngsildbroker.storagemanager.controller; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.List; + +import 
javax.servlet.http.HttpServletRequest; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import eu.neclab.ngsildbroker.commons.exceptions.HttpErrorResponseException; +import eu.neclab.ngsildbroker.commons.exceptions.ResponseException; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.commons.tools.HttpUtils; +import eu.neclab.ngsildbroker.storagemanager.repository.EntityStorageReaderDAO; + +@RestController +@RequestMapping("/scorpio/v1/info") +public class InfoController { + private static final int QUERY_MANAGER = 0; + private static final int ENTITY_MANAGER = 1; + private static final int STORAGE_MANAGER = 2; + private static final int SUBSCRIPTION_MANAGER = 3; + private static final int REGISTRY_MANAGER = 4; + private static final int HISTORY_MANAGER = 5; + HashMap microService2Uri = new HashMap(); + HashMap microService2SuccessReply = new HashMap(); + HashMap microService2HttpMethod = new HashMap(); + String dummyMessage = "{\r\n" + + " \"id\": \"NOTANURI\",\r\n" + + " \"type\": \"https://uri.fiware.org/ns/data-models#AirQualityObserved\"\r\n" + + " \r\n" + + "}"; + + { + try { + microService2Uri.put(QUERY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/entities/")); + microService2Uri.put(ENTITY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/entities/")); + microService2Uri.put(STORAGE_MANAGER, new URI("http://localhost:9090/scorpio/v1/info/")); // This is a bit + // useless since + // this is this + // controller + // but hey + // better than + // nothing + microService2Uri.put(SUBSCRIPTION_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/subscriptions/")); + microService2Uri.put(REGISTRY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/csourceRegistrations/")); + microService2Uri.put(HISTORY_MANAGER, new URI("http://localhost:9090/ngsi-ld/v1/temporal/entities/")); + + microService2SuccessReply.put(QUERY_MANAGER, 400); + microService2SuccessReply.put(ENTITY_MANAGER, 400); + microService2SuccessReply.put(STORAGE_MANAGER, 200); + microService2SuccessReply.put(SUBSCRIPTION_MANAGER, 200); + microService2SuccessReply.put(REGISTRY_MANAGER, 400); + microService2SuccessReply.put(HISTORY_MANAGER, 400); + + microService2HttpMethod.put(QUERY_MANAGER, 0); + microService2HttpMethod.put(ENTITY_MANAGER, 1); + microService2HttpMethod.put(STORAGE_MANAGER, 0); + microService2HttpMethod.put(SUBSCRIPTION_MANAGER, 0); + microService2HttpMethod.put(REGISTRY_MANAGER, 0); + microService2HttpMethod.put(HISTORY_MANAGER, 0); + } catch (URISyntaxException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + + } + + @Autowired + EntityStorageReaderDAO storageReaderDao; + + HttpUtils httpUtils = HttpUtils.getInstance(null); + + @GetMapping + public ResponseEntity getDefault() { + return ResponseEntity.status(HttpStatus.ACCEPTED) + .body("available subresources:\n/types\n/localtypes\n/stats\n/health"); + } + + @GetMapping(path = "/types") + public ResponseEntity getTypes(HttpServletRequest request) { + List types = storageReaderDao.getAllTypes(); + if (types == null) { + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("failed to get available types"); + } + return 
ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json") + .body(DataSerializer.toJson(types)); + } + + @GetMapping(path = "/localtypes") + public ResponseEntity getLocalTypes(HttpServletRequest request) { + List types = storageReaderDao.getLocalTypes(); + if (types == null) { + return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("failed to get available types"); + } + return ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json") + .body(DataSerializer.toJson(types)); + } + + @GetMapping(path = "/stats") + public ResponseEntity getStats(HttpServletRequest request) { + HashMap result = new HashMap(); + result.put("number of local available entities", storageReaderDao.getLocalEntitiesCount()); + result.put("number of local available types", storageReaderDao.getLocalTypesCount()); + return ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json") + .body(DataSerializer.toJson(result)); + } + + @GetMapping(path = "/health") + public ResponseEntity getHealth(HttpServletRequest request) { + HashMap result = new HashMap(); + + result.put("Status of Querymanager", getStatus(QUERY_MANAGER)); + result.put("Status of Entitymanager", getStatus(ENTITY_MANAGER)); + result.put("Status of Storagemanager", getStatus(STORAGE_MANAGER)); + result.put("Status of Subscriptionmanager", getStatus(SUBSCRIPTION_MANAGER)); + result.put("Status of Registrymanager", getStatus(REGISTRY_MANAGER)); + result.put("Status of Historymanager", getStatus(HISTORY_MANAGER)); + return ResponseEntity.status(HttpStatus.ACCEPTED).header("Content-Type", "application/json") + .body(DataSerializer.toJson(result)); + } + + private String getStatus(int component) { + URI uri = microService2Uri.get(component); + Integer success = microService2SuccessReply.get(component); + try { + switch (microService2HttpMethod.get(component)) { + case 0: + httpUtils.doGet(uri); + return "Up and running"; + case 1: + httpUtils.doPost(uri, dummyMessage, null); + return "Up and running"; + default: + return "Unable to determine status"; + } + + } catch (IOException e) { + if(e instanceof HttpErrorResponseException) { + HttpErrorResponseException httpError = (HttpErrorResponseException) e; + if(httpError.getStatusCode() == success) { + return "Up and running"; + } + } + } + return "Not running"; + + + } + +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/repository/EntityStorageReaderDAO.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/repository/EntityStorageReaderDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..99c0ab6912df890feb61b888dfc513cd7f13d561 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/repository/EntityStorageReaderDAO.java @@ -0,0 +1,33 @@ +package eu.neclab.ngsildbroker.storagemanager.repository; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.stereotype.Repository; +import eu.neclab.ngsildbroker.commons.storage.StorageReaderDAO; + +@Repository +@ConditionalOnProperty(value="reader.enabled", havingValue = "true", matchIfMissing = false) +public class EntityStorageReaderDAO extends StorageReaderDAO { + + public Long getLocalEntitiesCount() { + List> list = readerJdbcTemplate.queryForList( + "SELECT count(id) 
FROM entity;"); + if(list == null ||list.isEmpty()) { + return null; + } + return (Long) list.get(0).get("count"); + + } + public Long getLocalTypesCount() { + List> list = readerJdbcTemplate.queryForList( + "SELECT count(distinct(type)) FROM entity;"); + if(list == null ||list.isEmpty()) { + return null; + } + return (Long) list.get(0).get("count"); + + } +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/repository/StorageWriterDAO.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/repository/StorageWriterDAO.java new file mode 100644 index 0000000000000000000000000000000000000000..891b288cfed8436d6ede0296369db883aa3e5247 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/repository/StorageWriterDAO.java @@ -0,0 +1,137 @@ +package eu.neclab.ngsildbroker.storagemanager.repository; + +import java.sql.SQLException; + +import javax.annotation.PostConstruct; +import javax.sql.DataSource; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.stereotype.Repository; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.support.TransactionCallback; +import org.springframework.transaction.support.TransactionTemplate; + +import com.google.gson.Gson; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.commons.datatypes.TemporalEntityStorageKey; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; + +@Repository +@ConditionalOnProperty(value="writer.enabled", havingValue = "true", matchIfMissing = false) +public class StorageWriterDAO { + + private final static Logger logger = LogManager.getLogger(StorageWriterDAO.class); +// public static final Gson GSON = DataSerializer.GSON; + + @Autowired + private JdbcTemplate writerJdbcTemplate; + + @Autowired + private DataSource writerDataSource; + + private TransactionTemplate writerTransactionTemplate; + private JdbcTemplate writerJdbcTemplateWithTransaction; + + @PostConstruct + public void init() { + writerJdbcTemplate.execute("SELECT 1"); // create connection pool and connect to database + + // https://gist.github.com/mdellabitta/1444003 + DataSourceTransactionManager transactionManager = new DataSourceTransactionManager(writerDataSource); + writerJdbcTemplateWithTransaction = new JdbcTemplate(transactionManager.getDataSource()); + writerTransactionTemplate = new TransactionTemplate(transactionManager); + } + + public boolean store(String tableName, String columnName, String key, String value) throws SQLException { + try { + String sql; + int n = 0; + if (!value.equals("null")) { + sql = "INSERT INTO "+tableName+" (id, "+columnName+") VALUES (?, ?::jsonb) ON CONFLICT(id) DO UPDATE SET "+columnName+" = EXCLUDED."+columnName; + n = writerJdbcTemplate.update(sql, key, value); + } else { + sql = "DELETE FROM "+tableName+" WHERE id = ?"; + n = writerJdbcTemplate.update(sql, key); + } + logger.trace("Rows affected: " + Integer.toString(n)); + return true; //(n>0); + } catch (Exception e) { + logger.error("Exception ::",e); + e.printStackTrace(); + } + return false; + } + + 
public boolean storeTemporalEntity(String key, String value) throws SQLException { + try { + + TemporalEntityStorageKey tesk = DataSerializer.getTemporalEntityStorageKey(key); + + String entityId = tesk.getEntityId(); + String entityType = tesk.getEntityType(); + String entityCreatedAt = tesk.getEntityCreatedAt(); + String entityModifiedAt = tesk.getEntityModifiedAt(); + + String attributeId = tesk.getAttributeId(); + String instanceId = tesk.getInstanceId(); + Boolean overwriteOp = tesk.getOverwriteOp(); + + Integer n = 0; + + if (!value.equals("null")) { + // https://gist.github.com/mdellabitta/1444003 + n = writerTransactionTemplate.execute(new TransactionCallback() { + @Override + public Integer doInTransaction(TransactionStatus status) { + String sql; + Integer tn = 0; + if (entityId!=null && entityType!=null && entityCreatedAt!=null && entityModifiedAt!=null) { + sql = "INSERT INTO "+DBConstants.DBTABLE_TEMPORALENTITY+" (id, type, createdat, modifiedat) VALUES (?, ?, ?::timestamp, ?::timestamp) ON CONFLICT(id) DO UPDATE SET type = EXCLUDED.type, createdat = EXCLUDED.createdat, modifiedat = EXCLUDED.modifiedat"; + tn = writerJdbcTemplateWithTransaction.update(sql, entityId, entityType, entityCreatedAt, entityModifiedAt); + } + + if (entityId!=null && attributeId!=null) { + if (overwriteOp!=null && overwriteOp) { + sql = "DELETE FROM "+DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE+" WHERE temporalentity_id = ? AND attributeid = ?"; + tn += writerJdbcTemplateWithTransaction.update(sql, entityId, attributeId); + } + sql = "INSERT INTO "+DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE+" (temporalentity_id, attributeid, data) VALUES (?, ?, ?::jsonb) ON CONFLICT(temporalentity_id, attributeid, instanceid) DO UPDATE SET data = EXCLUDED.data"; + tn += writerJdbcTemplateWithTransaction.update(sql, entityId, attributeId, value); + // update modifiedat field in temporalentity + sql = "UPDATE "+DBConstants.DBTABLE_TEMPORALENTITY+" SET modifiedat = ?::timestamp WHERE id = ?"; + tn += writerJdbcTemplateWithTransaction.update(sql, entityModifiedAt, entityId); + } + return tn; + + } + }); + } else { + String sql; + if (entityId!=null && attributeId!=null && instanceId!=null) { + sql = "DELETE FROM "+DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE+" WHERE temporalentity_id = ? AND attributeid = ? AND instanceid = ?"; + n = writerJdbcTemplate.update(sql, entityId, attributeId, instanceId); + } else if (entityId!=null && attributeId!=null) { + sql = "DELETE FROM "+DBConstants.DBTABLE_TEMPORALENTITY_ATTRIBUTEINSTANCE+" WHERE temporalentity_id = ? 
AND attributeid = ?"; + n = writerJdbcTemplate.update(sql, entityId, attributeId); + } else if (entityId!=null) { + sql = "DELETE FROM "+DBConstants.DBTABLE_TEMPORALENTITY+" WHERE id = ?"; + n = writerJdbcTemplate.update(sql, entityId); + } + } + + logger.debug("Rows affected: " + Integer.toString(n)); + return true; + } catch (Exception e) { + logger.error("Exception ::",e); + e.printStackTrace(); + } + return false; + } + +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/services/StorageReaderService.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/services/StorageReaderService.java new file mode 100644 index 0000000000000000000000000000000000000000..74667bf517cde55d7f6f3d05f5e73f27621fd466 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/services/StorageReaderService.java @@ -0,0 +1,87 @@ +package eu.neclab.ngsildbroker.storagemanager.services; + +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.util.ArrayList; +import java.util.List; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.messaging.handler.annotation.Payload; +import org.springframework.messaging.handler.annotation.SendTo; +import org.springframework.stereotype.Service; + +import com.google.common.base.Splitter; +import com.google.gson.Gson; + +import eu.neclab.ngsildbroker.commons.datatypes.QueryParams; +import eu.neclab.ngsildbroker.commons.serialization.DataSerializer; +import eu.neclab.ngsildbroker.storagemanager.repository.EntityStorageReaderDAO; + +@Service +@ConditionalOnProperty(value = "reader.enabled", havingValue = "true", matchIfMissing = false) +public class StorageReaderService { + + private final static Logger logger = LogManager.getLogger(StorageWriterService.class); + private final static int MAX_UTF_SIZE = 65535; + + // public static final Gson GSON = DataSerializer.GSON; + + @Autowired + EntityStorageReaderDAO storageReaderDao; + + @KafkaListener(topics = "${query.topic}", groupId = "queryHandler", properties = { "max.request.size=104857600" }) + @SendTo + // @SendTo("QUERY_RESULT") // for tests without QueryManager + public byte[] handleQuery(@Payload byte[] message) throws Exception { + + /* + * TODO: Ignore old messages in Kafka queue based on producer timestamp. There + * is no custom annotation in KafkaListener to always start from the latest + * offset. 
Source: https://github.com/spring-projects/spring-kafka/issues/914 + * + * Please note auto.offset.reset is a different thing and does not apply to this + * issue + * + * + * @Header(KafkaHeaders.TIMESTAMP) String producerTimestamp, + * + * @Header(KafkaHeaders.TIMESTAMP_TYPE) String producerTimestampType + * logger.debug("Producer timestamp: " + producerTimestamp + " (" + + * producerTimestampType + ")"); + * + */ + + logger.trace("Listener queryHandler, Thread ID: " + Thread.currentThread().getId()); + logger.trace("handleQuery() :: started"); + String payload = new String(message); + logger.debug("Received message: " + payload); + List entityList = new ArrayList(); + try { + QueryParams qp = DataSerializer.getQueryParams(payload); + entityList = storageReaderDao.query(qp); + } catch (Exception e) { + e.printStackTrace(); + } + logger.trace("Pushing result to Kafka..."); + logger.debug(storageReaderDao.getListAsJsonArray(entityList)); + // write to byte array + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + DataOutputStream out = new DataOutputStream(baos); + for (String element : entityList) { + if (element.length() > MAX_UTF_SIZE) { + for(String subelement: Splitter.fixedLength(MAX_UTF_SIZE).split(element)) { + out.writeUTF(subelement); + } + } else { + out.writeUTF(element); + } + } + logger.trace("handleQuery() :: completed"); + return baos.toByteArray(); + } + +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/services/StorageWriterService.java b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/services/StorageWriterService.java new file mode 100644 index 0000000000000000000000000000000000000000..aa98a2782258a211f66db91b6fc28c1cfbe88ecd --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/java/eu/neclab/ngsildbroker/storagemanager/services/StorageWriterService.java @@ -0,0 +1,175 @@ +package eu.neclab.ngsildbroker.storagemanager.services; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.kafka.annotation.KafkaListener; +import org.springframework.kafka.config.KafkaListenerEndpointRegistry; +import org.springframework.kafka.listener.MessageListenerContainer; +import org.springframework.kafka.support.Acknowledgment; +import org.springframework.kafka.support.KafkaHeaders; +import org.springframework.messaging.handler.annotation.Header; +import org.springframework.messaging.handler.annotation.Payload; +import org.springframework.stereotype.Service; + +import eu.neclab.ngsildbroker.commons.constants.DBConstants; +import eu.neclab.ngsildbroker.storagemanager.repository.StorageWriterDAO; + +@Service +@ConditionalOnProperty(value="writer.enabled", havingValue = "true", matchIfMissing = false) +public class StorageWriterService { + + private final static Logger logger = LogManager.getLogger(StorageWriterService.class); + + public final static String ENTITY_LISTENER_ID = "entityWriter-1"; + public final static String KVENTITY_LISTENER_ID = "kvEntityWriter-1"; + public final static String ENTITY_WITHOUT_SYSATTRS_LISTENER_ID = "entityWithoutSysAttrsWriter-1"; + public final static String CSOURCE_LISTENER_ID = "csourceWriter-1"; + public final static String TEMPORALENTITY_LISTENER_ID = 
"temporalEntityWriter-1"; + + @Autowired + StorageWriterDAO storageWriterDao; + + @Value("${entity.stopListenerIfDbFails:true}") + boolean entityStopListenerIfDbFails; + @Value("${csource.stopListenerIfDbFails:true}") + boolean csourceStopListenerIfDbFails; + @Value("${entity.temporal.stopListenerIfDbFails:true}") + boolean temporalEntityStopListenerIfDbFails; + + boolean entityListenerOk = true; + boolean csourceListenerOk = true; + boolean temporalEntityListenerOk = true; + + @Autowired + private KafkaListenerEndpointRegistry kafkaListenerEndpoint; + + /* + * @KafkaListener(containerFactory = + * "kafkaListenerContainerFactoryManualOffsetCommit", topics = + * "${entity.topic}", groupId = "entityWriter") public void writeEntity(@Payload + * byte[] message, Acknowledgment acknowledgment, + * + * @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String + * key, @Header(KafkaHeaders.OFFSET) Long offset) throws Exception { + * logger.trace("Listener entityWriter, Thread ID: " + + * Thread.currentThread().getId()); logger.debug("Received key: " + key); if + * (!entityListenerOk) // this test is needed because listenerContainer.stop() + * does not work properly // during boot time (probably because of concurrency) + * return; logger.debug("Received offset: " + offset.toString()); String payload + * = new String(message); logger.debug("Received message: " + payload); + * logger.trace("Writing data..."); if (storageWriterDao != null && + * storageWriterDao.store(DBConstants.DBTABLE_ENTITY, DBConstants.DBCOLUMN_DATA, + * key, payload)) { acknowledgment.acknowledge(); + * logger.trace("Kafka offset commited"); } else { if + * (entityStopListenerIfDbFails) { entityListenerOk = false; + * logger.error("DB failed, not processing any new messages"); + * MessageListenerContainer listenerContainer = kafkaListenerEndpoint + * .getListenerContainer(ENTITY_LISTENER_ID); listenerContainer.stop(); } } + * + * logger.trace("Writing is complete"); } + * + * @KafkaListener(containerFactory = + * "kafkaListenerContainerFactoryManualOffsetCommit", topics = + * "${entity.withoutSysAttrs.topic}", groupId = "entityWithoutSysAttrsWriter") + * public void writeEntityWithoutSysAttrs(@Payload byte[] message, + * Acknowledgment acknowledgment, + * + * @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String + * key, @Header(KafkaHeaders.OFFSET) Long offset) throws Exception { + * logger.trace("Listener entityWithoutSysAttrsWriter, Thread ID: " + + * Thread.currentThread().getId()); logger.debug("Received key: " + key); if + * (!entityListenerOk) // this test is needed because listenerContainer.stop() + * does not work properly // during boot time (probably because of concurrency) + * return; logger.debug("Received offset: " + offset.toString()); String payload + * = new String(message); logger.debug("Received message: " + payload); + * logger.trace("Writing data..."); if (storageWriterDao != null && + * storageWriterDao.store(DBConstants.DBTABLE_ENTITY, + * DBConstants.DBCOLUMN_DATA_WITHOUT_SYSATTRS, key, payload)) { + * acknowledgment.acknowledge(); logger.trace("Kafka offset commited"); } else { + * if (entityStopListenerIfDbFails) { entityListenerOk = false; + * logger.error("DB failed, not processing any new messages"); + * MessageListenerContainer listenerContainer = kafkaListenerEndpoint + * .getListenerContainer(ENTITY_WITHOUT_SYSATTRS_LISTENER_ID); + * listenerContainer.stop(); } } logger.trace("Writing is complete"); } + * + * @KafkaListener(containerFactory = + * "kafkaListenerContainerFactoryManualOffsetCommit", topics = + * 
"${entity.keyValues.topic}", groupId = "kvEntityWriter") public void + * writeKeyValueEntity(@Payload byte[] message, Acknowledgment acknowledgment, + * + * @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String + * key, @Header(KafkaHeaders.OFFSET) Long offset) throws Exception { + * logger.trace("Listener kvEntityWriter, Thread ID: " + + * Thread.currentThread().getId()); logger.debug("Received key: " + key); if + * (!entityListenerOk) // this test is needed because listenerContainer.stop() + * does not work properly // during boot time (probably because of concurrency) + * return; logger.debug("Received offset: " + offset.toString()); String payload + * = new String(message); logger.debug("Received message: " + payload); + * logger.trace("Writing data..."); if (storageWriterDao != null && + * storageWriterDao.store(DBConstants.DBTABLE_ENTITY, + * DBConstants.DBCOLUMN_KVDATA, key, payload)) { acknowledgment.acknowledge(); + * logger.trace("Kafka offset commited"); } else { if + * (entityStopListenerIfDbFails) { entityListenerOk = false; + * logger.error("DB failed, not processing any new messages"); + * MessageListenerContainer listenerContainer = kafkaListenerEndpoint + * .getListenerContainer(KVENTITY_LISTENER_ID); listenerContainer.stop(); } } + * logger.trace("Writing is complete"); } + */ + @KafkaListener(containerFactory = "kafkaListenerContainerFactoryManualOffsetCommit", topics = "${csource.topic}", id = CSOURCE_LISTENER_ID, groupId = "csourceWriter", containerGroup = "csourceWriter-container") + public void writeCSource(@Payload byte[] message, Acknowledgment acknowledgment, + @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key, @Header(KafkaHeaders.OFFSET) Long offset) + throws Exception { + logger.trace("Listener csourceWriter, Thread ID: " + Thread.currentThread().getId()); + logger.debug("Received key: " + key); + if (!csourceListenerOk) // this test is needed because listenerContainer.stop() does not work properly + // during boot time (probably because of concurrency) + return; + String payload = new String(message); + logger.debug("Received message: " + payload); + logger.trace("Writing data..."); + if (storageWriterDao != null && storageWriterDao.store(DBConstants.DBTABLE_CSOURCE, DBConstants.DBCOLUMN_DATA, key, payload)) { + acknowledgment.acknowledge(); + logger.trace("Kafka offset commited"); + } else { + if (csourceStopListenerIfDbFails) { + csourceListenerOk = false; + logger.error("DB failed, not processing any new messages"); + MessageListenerContainer listenerContainer = kafkaListenerEndpoint + .getListenerContainer(CSOURCE_LISTENER_ID); + listenerContainer.stop(); + } + } + logger.trace("Writing is complete"); + } + + @KafkaListener(containerFactory = "kafkaListenerContainerFactoryManualOffsetCommit", topics = "${entity.temporal.topic}", id = TEMPORALENTITY_LISTENER_ID, groupId = "temporalEntityWriter", containerGroup = "temporalEntityWriter-container") + public void writeTemporalEntity(@Payload byte[] message, Acknowledgment acknowledgment, + @Header(KafkaHeaders.RECEIVED_MESSAGE_KEY) String key, @Header(KafkaHeaders.OFFSET) Long offset) + throws Exception { + logger.trace("Listener temporalEntityWriter, Thread ID: " + Thread.currentThread().getId()); + logger.debug("Received key: " + key); + if (!temporalEntityListenerOk) // this test is needed because listenerContainer.stop() does not work properly + // during boot time (probably because of concurrency) + return; + String payload = new String(message); + logger.debug("Received message: " + payload); + 
logger.trace("Writing data..."); + if (storageWriterDao != null && storageWriterDao.storeTemporalEntity(key, payload)) { + acknowledgment.acknowledge(); + logger.trace("Kafka offset commited"); + } else { + if (temporalEntityStopListenerIfDbFails) { + temporalEntityListenerOk = false; + logger.error("DB failed, not processing any new messages"); + MessageListenerContainer listenerContainer = kafkaListenerEndpoint + .getListenerContainer(TEMPORALENTITY_LISTENER_ID); + listenerContainer.stop(); + } + } + logger.trace("Writing is complete"); + } + +} diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/application-aaio.yml b/scorpio-broker/Storage/StorageManager/src/main/resources/application-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..40b3aacae11b5161fbdea89201b613224091f7e6 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/application-aaio.yml @@ -0,0 +1,45 @@ +server: + port: 1029 + +bootstrap: + servers: kafka:9092 + +spring: + cloud: + stream: + kafka: + binder: + brokers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +reader: + enabled: true + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + +writer: + enabled: true + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/application-aio.yml b/scorpio-broker/Storage/StorageManager/src/main/resources/application-aio.yml new file mode 100644 index 0000000000000000000000000000000000000000..688e6997a243f03ad00dc01c3e3d9ba5ffcf347e --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/application-aio.yml @@ -0,0 +1,45 @@ +server: + port: 1029 + +bootstrap: + servers: localhost:9092 + +spring: + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://localhost:8761/eureka/ + +reader: + enabled: true + datasource: + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + +writer: + enabled: true + datasource: + url: jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/application-dist.yml b/scorpio-broker/Storage/StorageManager/src/main/resources/application-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..65cbb53c64ee69fe620f696c4ddb97fb4c09a2c2 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/application-dist.yml @@ -0,0 +1,45 @@ +server: + port: 1029 + +bootstrap: + servers: kafka:9092 + +spring: + cloud: + stream: + 
kafka: + binder: + brokers: kafka:9092 + +eureka: + client: + serviceUrl: + defaultZone: http://eureka:8761/eureka/ + +reader: + enabled: true + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + +writer: + enabled: true + datasource: + url: jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/application.yml b/scorpio-broker/Storage/StorageManager/src/main/resources/application.yml new file mode 100644 index 0000000000000000000000000000000000000000..5f6c873a7c6d3b697bec67ebdb4fb1f65d2a5d19 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/application.yml @@ -0,0 +1,67 @@ +spring: + application: + name: storage-manager + main: + lazy-initialization: true + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + flyway: + baselineOnMigrate: true + +server: + port: 1029 + +max: + request: + size: 104857600 +bootstrap: + servers: localhost:9092 +query: + topic: QUERY + result: + topic: QUERY_RESULT +entity: + topic: ENTITY + keyValues: + topic: KVENTITY + withoutSysAttrs: + topic: ENTITY_WITHOUT_SYSATTRS + stopListenerIfDbFails: false + temporal: + topic: TEMPORALENTITY + stopListenerIfDbFails: false +csource: + topic: CONTEXT_SOURCE + stopListenerIfDbFails: false + + +reader: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 + +writer: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Writer + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.1__entity.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.2__registry.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); 
+CREATE INDEX i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ 
LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.3__temporal.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF 
TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; 
+ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN 
SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
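			-- e.g. (hypothetical values): an entities element {"@id": "urn:ngsi-ld:Vehicle:A4567",
			-- "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"]} fills entity_id and entity_type
			-- and leaves entity_idpattern NULL, whereas an element that only carries
			-- "https://uri.etsi.org/ngsi-ld/idPattern": [{"@value": "urn:ngsi-ld:Vehicle:.*"}] alongside a @type
			-- fills entity_type and entity_idpattern instead, exactly as the CASE below encodes.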
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
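		-- note: the compound guard above is a NULL-safe "has data changed" check. For UPDATEs it is
		-- equivalent to OLD.data IS DISTINCT FROM NEW.data, and for INSERTs it additionally requires
		-- NEW.data to be present, so the extraction below never dereferences a NULL document
		-- (data was made nullable by V20190611.1__sysattrs.sql).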
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only 
= (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/Storage/StorageManager/src/main/resources/log4j2-spring.xml b/scorpio-broker/Storage/StorageManager/src/main/resources/log4j2-spring.xml new file mode 100644 index 0000000000000000000000000000000000000000..5ccbbe1e0c3f283cbf64a986a9922f521d522c80 --- /dev/null +++ b/scorpio-broker/Storage/StorageManager/src/main/resources/log4j2-spring.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + %d %p %C{1.} [%t] %m%n + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/Storage/pom.xml b/scorpio-broker/Storage/pom.xml new file mode 100644 index 0000000000000000000000000000000000000000..d57ba56bf6dec0ff7128f8404e62a5cc957590a7 --- /dev/null +++ b/scorpio-broker/Storage/pom.xml @@ -0,0 +1,17 @@ + + 4.0.0 + Storage + pom + + eu.neclab.ngsildbroker + BrokerParent + 1.0.0-SNAPSHOT + ../BrokerParent + + 1.0.0-SNAPSHOT + + + StorageManager + + diff --git a/scorpio-broker/SubscriptionManager/target/classes/META-INF/jandex.idx b/scorpio-broker/SubscriptionManager/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..d498068775c9a997526e9d3c7950b803475c0ed4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/META-INF/jandex.idx differ diff --git 
a/scorpio-broker/SubscriptionManager/target/classes/application-activemq.properties b/scorpio-broker/SubscriptionManager/target/classes/application-activemq.properties new file mode 100644 index 0000000000000000000000000000000000000000..70ac4ed8d2ab4b1ad71fbd0dee24994e8641a631 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/application-activemq.properties @@ -0,0 +1,37 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:61616} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=activemq +mysettings.messageconnection.options= +camel.component.activemq.broker-url=${bootstrap.servers} + + +scorpio.messaging.maxSize=1048576 + +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### + +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-camel +mp.messaging.outgoing.subalive.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subalive}${mysettings.messageconnection.options} +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-camel +mp.messaging.incoming.subaliveretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subalive}${mysettings.messageconnection.options} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-camel +mp.messaging.outgoing.subsync.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subsync}${mysettings.messageconnection.options} +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-camel +mp.messaging.incoming.subsyncretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subsync}${mysettings.messageconnection.options} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest + +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/application-kafka.properties b/scorpio-broker/SubscriptionManager/target/classes/application-kafka.properties new file mode 100644 index 0000000000000000000000000000000000000000..93470865c6a2dddf86a4a6a309b7c6477e536d66 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/application-kafka.properties @@ -0,0 +1,40 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:9092} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#Kafka settings +kafka.bootstrap.servers=${bootstrap.servers} +scorpio.messaging.maxSize=1048576 +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-kafka +mp.messaging.incoming.entityretrieve.topic=${scorpio.topics.entity} 
+mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.registryretrieve.connector=smallrye-kafka +mp.messaging.incoming.registryretrieve.topic=${scorpio.topics.registry} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-kafka +mp.messaging.outgoing.subalive.topic=${scorpio.topics.subalive} +mp.messaging.outgoing.subalive.broadcast=true +#mp.messaging.outgoing.subalive.value.serializer=eu.neclab.ngsildbroker.commons.serialization.messaging.AliveAnnouncementSerializer +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-kafka +mp.messaging.incoming.subaliveretrieve.topic=${scorpio.topics.subalive} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +mp.messaging.incoming.subaliveretrieve.broadcast=true +#mp.messaging.incoming.subaliveretrieve.value.deserializer=eu.neclab.ngsildbroker.commons.serialization.messaging.AliveAnnouncementDeserializer +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-kafka +mp.messaging.outgoing.subsync.topic=${scorpio.topics.subsync} +mp.messaging.outgoing.subsync.broadcast=true +#mp.messaging.outgoing.subsync.value.serializer=eu.neclab.ngsildbroker.commons.serialization.messaging.SyncMessageSerializer +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-kafka +mp.messaging.incoming.subsyncretrieve.topic=${scorpio.topics.subsync} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest +mp.messaging.incoming.subsyncretrieve.broadcast=true + + + diff --git a/scorpio-broker/SubscriptionManager/target/classes/application-mqtt.properties b/scorpio-broker/SubscriptionManager/target/classes/application-mqtt.properties new file mode 100644 index 0000000000000000000000000000000000000000..6edcf4dd31cbc04d656eee3e9604cebf4f93c632 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/application-mqtt.properties @@ -0,0 +1,34 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:1883} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=paho-mqtt5 +mysettings.messageconnection.options= +camel.component.paho-mqtt5.broker-url=tcp://${bootstrap.servers} + +scorpio.messaging.maxSize=268435455 +#readability block########### +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-camel 
+mp.messaging.outgoing.subalive.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subalive}${mysettings.messageconnection.options} +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-camel +mp.messaging.incoming.subaliveretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subalive}${mysettings.messageconnection.options} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-camel +mp.messaging.outgoing.subsync.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subsync}${mysettings.messageconnection.options} +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-camel +mp.messaging.incoming.subsyncretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subsync}${mysettings.messageconnection.options} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest + diff --git a/scorpio-broker/SubscriptionManager/target/classes/application-rabbitmq.properties b/scorpio-broker/SubscriptionManager/target/classes/application-rabbitmq.properties new file mode 100644 index 0000000000000000000000000000000000000000..b7ca975c3ca4c72760448a7bae1a2d52fb6a89a0 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/application-rabbitmq.properties @@ -0,0 +1,36 @@ +mysettings.kafka.bootstrap.host=${bushost:localhost} +mysettings.kafka.bootstrap.port=${busport:5672} +bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} + +mysettings.messageconnection.protocol=rabbitmq +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=134217728 +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true + +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true +#readability block########### +mp.messaging.outgoing.subalive.connector=smallrye-camel +mp.messaging.outgoing.subalive.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subalive}${mysettings.messageconnection.options} +mp.messaging.outgoing.subalive.merge=true +mp.messaging.incoming.subaliveretrieve.connector=smallrye-camel +mp.messaging.incoming.subaliveretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subalive}${mysettings.messageconnection.options} +mp.messaging.incoming.subaliveretrieve.auto.offset.reset=latest +#readability block########### +mp.messaging.outgoing.subsync.connector=smallrye-camel +mp.messaging.outgoing.subsync.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subsync}${mysettings.messageconnection.options} +mp.messaging.outgoing.subsync.merge=true +mp.messaging.incoming.subsyncretrieve.connector=smallrye-camel 
+mp.messaging.incoming.subsyncretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.subsync}${mysettings.messageconnection.options} +mp.messaging.incoming.subsyncretrieve.auto.offset.reset=latest +camel.component.rabbitmq.hostname=localhost +camel.component.rabbitmq.port-number=5672 + + diff --git a/scorpio-broker/SubscriptionManager/target/classes/application-sqs.properties b/scorpio-broker/SubscriptionManager/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..f24e7d0fb5dda6201c817657d2cc6b7ef090c9c5 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/application-sqs.properties @@ -0,0 +1,21 @@ +#mysettings.kafka.bootstrap.host=${bushost:localhost} +#mysettings.kafka.bootstrap.port=${busport:5672} +#bootstrap.servers=${mysettings.kafka.bootstrap.host}:${mysettings.kafka.bootstrap.port} +#camel.component.aws2-sqs.maximum-message-size=10485760 +mysettings.messageconnection.protocol=sns-fanout +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.messageconnection.options= +#?addresses=localhost:5672 +scorpio.messaging.maxSize=262144 +#readability block########### +mp.messaging.incoming.entityretrieve.connector=smallrye-camel +mp.messaging.incoming.entityretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.entity}${mysettings.messageconnection.options} +mp.messaging.incoming.entityretrieve.auto.offset.reset=latest +mp.messaging.incoming.entityretrieve.broadcast=true +#readability block########### +mp.messaging.incoming.registryretrieve.connector=smallrye-camel +mp.messaging.incoming.registryretrieve.endpoint-uri=${mysettings.messageconnection.protocol}:${scorpio.topics.registry}${mysettings.messageconnection.options} +mp.messaging.incoming.registryretrieve.auto.offset.reset=latest +mp.messaging.incoming.registryretrieve.broadcast=true + + diff --git a/scorpio-broker/SubscriptionManager/target/classes/application.properties b/scorpio-broker/SubscriptionManager/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..b36444f9589b37b43e6097c325b3e1bc4f07d5b5 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/application.properties @@ -0,0 +1,47 @@ +quarkus.application.name=subscription-manager +quarkus.http.port=2026 +quarkus.log.level=INFO +quarkus.ssl.native=true +#quarkus.log.category."eu.neclab".level=DEBUG +#quarkus.vertx.event-loops-pool-size=32 +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +scorpio.at-context-server=http://at-context-server:2023 +atcontext.url=${scorpio.at-context-server}/ngsi-ld/v1/jsonldContexts/ +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 +scorpio.directDB=true +scorpio.topics.entity=ENTITY +scorpio.topics.registry=REGISTRY +scorpio.topics.internalnotification=I_NOTIFY +scorpio.topics.internalregsub=I_REGSUB +scorpio.topics.subalive=SUB_ALIVE +scorpio.topics.subsync=SUB_SYNC 
+scorpio.startupdelay=5s +scorpio.sync.announcement-time=30s +scorpio.sync.check-time=90s +#Database settings +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +#quarkus.datasource.reactive.name=blabliblub +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +selfhostcorecontext=http://localhost:9090/corecontext +ngsild.corecontext=https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context-v1.3.jsonld +scorpio.entity-manager-server=http://entity-manager:1025 +quarkus.rest-client.entity-service.url=${scorpio.entity-manager-server} +scorpio.subscription.checkinterval=2s +quarkus.rest-client.atcontext-service.url=${scorpio.at-context-server} diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
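		-- for orientation, the paths below assume the entity document is stored in expanded JSON-LD form;
		-- an illustrative (hypothetical) fragment looks like:
		--   { "@id": "urn:ngsi-ld:Vehicle:A4567",
		--     "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"],
		--     "https://uri.etsi.org/ngsi-ld/createdAt": [{"@value": "2019-06-04T12:00:00Z"}] }
		-- so data#>>'{@type,0}' yields the first type URI and
		-- data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}' yields the creation timestamp.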
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX 
i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + 
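As a rough sketch of what the two registration triggers derive once they are created below (identifiers and payload are illustrative only, assuming the expanded JSON-LD shape that the jsonb paths above read):

    -- illustrative sample, not part of this migration
    INSERT INTO csource (id, data) VALUES (
        'urn:ngsi-ld:ContextSourceRegistration:csr1',
        '{"@id": "urn:ngsi-ld:ContextSourceRegistration:csr1",
          "@type": ["https://uri.etsi.org/ngsi-ld/ContextSourceRegistration"],
          "https://uri.etsi.org/ngsi-ld/endpoint": [{"@value": "http://my.csource.org:1026"}],
          "https://uri.etsi.org/ngsi-ld/information": [{
              "https://uri.etsi.org/ngsi-ld/entities": [{
                  "@type": ["https://uri.etsi.org/ngsi-ld/default-context/Vehicle"]}]}]}'::jsonb);

    -- the BEFORE trigger fills type and endpoint from the document; the AFTER trigger adds one
    -- csourceinformation row with entity_type set and entity_id/entity_idpattern NULL, and the
    -- registration is flagged as entity-info-only
    SELECT type, endpoint, has_registrationinfo_with_entityinfo_only
      FROM csource
     WHERE id = 'urn:ngsi-ld:ContextSourceRegistration:csr1';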
+CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN 
-- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git 
a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ 
b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
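			-- group_id (one value per registration-information block, taken from
			-- csourceinformation_group_id_seq above) ties the entity rows and the property/relationship
			-- rows of the same block together, so a discovery query can demand that an entity type and an
			-- attribute are offered by the same block, e.g. (illustrative sketch):
			--   SELECT DISTINCT csource_id
			--     FROM csourceinformation e
			--     JOIN csourceinformation a USING (csource_id, group_id)
			--    WHERE e.entity_type = 'https://uri.etsi.org/ngsi-ld/default-context/Vehicle'
			--      AND a.property_id = 'https://uri.etsi.org/ngsi-ld/default-context/speed';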
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF 
(NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20211217.1__subscription_table.sql 
b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
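-- Editor's note (sketch, not part of the original migration file): the location branches below feed
-- the expanded JSON-LD GeoProperty value through getGeoJson/getCoordinates (defined above) so that
-- ST_GeomFromGeoJSON receives plain GeoJSON. One nuance: jsonb_insert with a '{-1}' path and no
-- fourth argument inserts *before* the last element, e.g.
--   SELECT jsonb_insert('[1,2]'::jsonb, '{-1}', '9'::jsonb);        -- [1, 9, 2]
--   SELECT jsonb_insert('[1,2]'::jsonb, '{-1}', '9'::jsonb, true);  -- [1, 2, 9]
-- which appears to be what the later V20220131.2__bugfix_getCoordinates.sql in this changeset
-- addresses by passing true so coordinates are appended in document order.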
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + 
l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + 
SELECT array_append(scopes,'{-1}', (i#>'{@value}')) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION 
csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
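-- Editor's note (sketch, not part of the original migration file): unlike the earlier versions of
-- these functions in this changeset, the V20220125.x bugfix migrations read attribute names from the
-- propertyNames/relationshipNames members rather than properties/relationships, apparently tracking
-- the renamed RegistrationInfo fields in later NGSI-LD revisions. The commented query illustrates
-- the path used in the propertyNames INSERT below on a hypothetical expanded entry:
-- SELECT value#>>'{@id}'
-- FROM jsonb_array_elements('[{"@id": "https://uri.etsi.org/ngsi-ld/default-context/speed"}]'::jsonb);
-- -- -> https://uri.etsi.org/ngsi-ld/default-context/speed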
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 
0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + e_type 
text, + e_prop text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = 
(NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
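+  -- The branch above either rewrites the attribute with the surviving instances or, when none
+  -- remain after the datasetId comparison, removes the attribute key from the entity entirely.
+  -- Unlike NGSILD_PARTIALUPDATE above, which addresses the attribute via ARRAY[attribName],
+  -- this call passes the literal path '{attribName}' to jsonb_set.
+  -- Hypothetical invocation (the attribute IRI and entity id below are illustrative only,
+  -- not taken from this migration):
+  --   SELECT NGSILD_DELETEATTRIB(entity, 'https://example.org/speed', NULL)
+  --     FROM entity WHERE id = 'urn:ngsi-ld:Vehicle:A1';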
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE 
PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT 
TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; 
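+  -- MERGE_JSON applies an expanded NGSI-LD merge patch b to the entity stored under id a:
+  -- members whose value carries the NGSI-LD null marker (urn:ngsi-ld:null in hasValue or
+  -- hasObject) delete the corresponding key, other members overwrite or add keys, the entity
+  -- row and its e_types are rewritten, and the pre-patch entity is returned to the caller.
+  -- Hypothetical call (entity id and patch document are illustrative only, not from this file):
+  --   SELECT MERGE_JSON('urn:ngsi-ld:Vehicle:A1',
+  --     '{"https://example.org/speed": [{"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": 60}]}]}'::jsonb);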
+BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": 
"@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": 
"ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + 
operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD 
COLUMN IF NOT EXISTS location geometry; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? 
key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": 
"ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": 
"ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20230904.1__fixsubs.sql 
@@ -0,0 +1 @@ +UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || 
jsonb_build_object('id', entityId, 'old', prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": 
"geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + 
}, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + 
"typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": 
"ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY 
WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
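+-- Descriptive note (added): the boolean array encodes which NGSI-LD operations a registration
+-- supports; positions 1..41 follow the column order of the csourceinformation INSERTs below
+-- (createEntity .. retrieveEntityMap). The grouped aliases federationOps, updateOps,
+-- retrieveOps and redirectionOps switch several flags at once.
+-- Example (hedged): SELECT getoperations('[{"@value": "retrieveOps"}]'::jsonb)
+-- returns an array with positions 21 (retrieveEntity) and 22 (queryEntity) set to true.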
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
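+-- Descriptive note (added): registration members may arrive expanded against the core NGSI-LD
+-- context (https://uri.etsi.org/ngsi-ld/...) or against the default vocabulary
+-- (.../ngsi-ld/default-context/...); both spellings are checked. When no mode member is
+-- present, the mode defaults to 1 (inclusive).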
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
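+-- Descriptive note (added): for registrations in redirect or exclusive mode (reg_mode > 1) the
+-- trigger refuses to register a relationship that is already stored locally on a matching
+-- entity (by id, idPattern or type) and raises SQLSTATE 23514, mirroring the property check above.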
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE 
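+-- Descriptive note (added): an attribute instance whose hasValue or hasObject equals the
+-- literal urn:ngsi-ld:null marks a deletion in the merge patch, so the key is removed from the
+-- stored entity; otherwise the incoming instance replaces the stored one while the original
+-- createdAt timestamp is carried over. Remaining urn:ngsi-ld:null markers, empty arrays/objects
+-- and bare @language wrappers are stripped in the cleanup pass below.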
'%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": 
"ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", + + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + 
"@id": "ngsi-ld:hasLanguageMaps", + + "@container": "@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": 
"ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + + "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? 
'@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
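+-- Descriptive note (added): merge_has_json merges the raw @value payload of a JsonProperty.
+-- Arrays of equal length are merged element by element and objects key by key; any element or
+-- member equal to the literal urn:ngsi-ld:null is removed, and a result that collapses to an
+-- empty array or object is returned as NULL so the caller drops the key.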
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
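+-- Descriptive note (added): languageMap entries are merged per language tag. A patch entry with
+-- @language '@none' and value urn:ngsi-ld:null deletes the whole map; otherwise a matching
+-- language is overwritten, urn:ngsi-ld:null removes just that language, and languages not yet
+-- present are appended.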
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
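+-- Descriptive note (added): merge_has_object merges the target(s) of a Relationship. When the
+-- patch has a different number of instances it replaces the stored value outright (or deletes
+-- it if the first @id is urn:ngsi-ld:null); with matching cardinality each @id is overwritten
+-- in place and urn:ngsi-ld:null removes the corresponding instance.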
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
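+-- Descriptive note (added): merge_has_value merges Property values instance by instance. A
+-- differing instance count means the patch replaces the stored value (or deletes it when the
+-- first @value is urn:ngsi-ld:null); otherwise @value members are overwritten, nested
+-- structures are merged recursively, and an empty result is returned as NULL.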
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
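+-- Descriptive note (added): merge_attrib_instance dispatches on the attribute @type (Property,
+-- Relationship, ListProperty, ListRelationship, GeoProperty, LanguageProperty, VocabProperty,
+-- JsonProperty) and delegates the value member to the matching merge_* helper; createdAt is
+-- stripped from the patch, unitCode and objectType can be cleared with urn:ngsi-ld:null, and
+-- changing the attribute type raises SB001.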
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> ARRAY[key,'0']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> '{0,@value}', old_attrib #> ARRAY[key,'0','@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,'0','@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Invalid attribute structure: expected a JSON array' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id = a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id = a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt' then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) AS merged_array INTO value2 FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Invalid attribute structure: expected a JSON array' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.class new file mode 100644 index 0000000000000000000000000000000000000000..f2bad67c032460e818a1a963e6cfd9bfa433d8a4 Binary files /dev/null and 
b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.class new file mode 100644 index 0000000000000000000000000000000000000000..2ed503fa2413f2fe62cc0ecc513a6443dade9476 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingBase.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingBase.class new file mode 100644 index 0000000000000000000000000000000000000000..bd6304d16efa49a1199dcb8a23fa0f1db097e1c4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingBase.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingByteArray.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..e09423dec9b5c53838b0f0e91474c0f9249d62e6 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingByteArray.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingInMemory.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingInMemory.class new file mode 100644 index 0000000000000000000000000000000000000000..fb71a87a2eb297b8f3d67caab48181d8546061dd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingInMemory.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingString.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingString.class new file mode 100644 index 0000000000000000000000000000000000000000..df9ad8fef5e91e84d226d05b66fc732218cdedcf Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingString.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncSQS.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncSQS.class new file mode 100644 index 0000000000000000000000000000000000000000..a181cdf3262c6e9f19eb37e71a05d6fdbc88c0c4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncSQS.class differ diff 
--git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceBase.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceBase.class new file mode 100644 index 0000000000000000000000000000000000000000..e05e4eb215ab26d684ff2a5c1bfc5b61f29abc50 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceBase.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceByteArray.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceByteArray.class new file mode 100644 index 0000000000000000000000000000000000000000..753e2cbbf589af794a45eef38c2b25603efc3c35 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceByteArray.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceString.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceString.class new file mode 100644 index 0000000000000000000000000000000000000000..f2557df493a94caff526fb24c96087ca7ea15ddc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceString.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SyncService.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SyncService.class new file mode 100644 index 0000000000000000000000000000000000000000..fe97a721bc630ff5c5c0cf7c88fbbdfb201e8bca Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SyncService.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/repository/SubscriptionInfoDAO.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/repository/SubscriptionInfoDAO.class new file mode 100644 index 0000000000000000000000000000000000000000..7ae6e8586aca604e46accd498eed752d2c02500d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/repository/SubscriptionInfoDAO.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalContextService.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalContextService.class new file mode 100644 index 0000000000000000000000000000000000000000..97aee64e4c286f60786ea126ebf918b263c91670 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalContextService.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalEntityService.class 
b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalEntityService.class new file mode 100644 index 0000000000000000000000000000000000000000..eed9cb82303d9bc18a0ac50912a168a28ef688eb Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalEntityService.class differ diff --git a/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.class b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.class new file mode 100644 index 0000000000000000000000000000000000000000..74bdeda59d92178db3c4b18ccd68cc6d79f470a4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/classes/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.class differ diff --git a/scorpio-broker/SubscriptionManager/target/maven-archiver/pom.properties b/scorpio-broker/SubscriptionManager/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..dda6bae1917ce830d9a4bf4aa3a6a32feca4314e --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:27:45 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=subscription-manager +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..9c0761f65c9bc063cb000b37beb63e7242d97720 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,15 @@ +eu/neclab/ngsildbroker/subscriptionmanager/repository/SubscriptionInfoDAO.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingString.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncSQS.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingByteArray.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceBase.class +eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.class +eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceString.class +eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SyncService.class +eu/neclab/ngsildbroker/subscriptionmanager/service/LocalContextService.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingBase.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingInMemory.class +eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceByteArray.class +eu/neclab/ngsildbroker/subscriptionmanager/service/LocalEntityService.class diff --git a/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 
0000000000000000000000000000000000000000..04004d6c11029a7d0ba933a3e2db1ce0bc6f6202 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,15 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingInMemory.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionMessagingString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncSQS.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceBase.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceByteArray.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SubscriptionSyncServiceString.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/messaging/SyncService.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/repository/SubscriptionInfoDAO.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalContextService.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/LocalEntityService.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/main/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionService.java diff --git a/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..0e849c79d6f44a7568953ff026258d66d9d072fe --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/createdFiles.lst @@ -0,0 +1,4 @@ +eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class +eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationControllerTest.class +eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class +eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.class diff --git 
a/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..7c21e4a74d561ce0933636c0d47e3bd7d74f31d4 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/maven-status/maven-compiler-plugin/testCompile/default-testCompile/inputFiles.lst @@ -0,0 +1,5 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/SubscriptionHandlerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/SubscriptionManager/src/test/java/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.java diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/app/subscription-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/app/subscription-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8b52101950a02687fdf6a63d5847665ebb3928 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/app/subscription-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 
100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 
index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 
0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f50f458b6835e03ae1482f4cace0e67d097de9f6 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-config-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-config-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..bccd314427f4e52005a2e21fb2c519f54006d176 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-config-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-jackson-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c83bc4a897381ffe05df82c02b7cb5adfecf3233 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-jaxrs-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-jaxrs-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1c5413d7754ef2458967fd2b519ce32ff608212d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-client-jaxrs-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-stork-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-stork-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2cc9308b0499db11c2eedc71d932b826fdf422bf Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-stork-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary 
files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-client-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c23eb949a463b044e22aa04a1bd48afd1f866aa6 Binary files /dev/null and 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-client-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff 
--git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.stork.stork-api-2.6.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.stork.stork-api-2.6.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e24348ed3ef5e3f1a736be21f4ad3d3c0c8bcf2e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.stork.stork-api-2.6.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.stork.stork-core-2.6.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.stork.stork-core-2.6.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f7cbed7d30c009b3a1859cd4638e4d43f94519c5 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.smallrye.stork.stork-core-2.6.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary 
files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 
0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null 
and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git 
a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 
index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 
100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.rest.client.microprofile-rest-client-api-3.0.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.rest.client.microprofile-rest-client-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..da48979976ae4a6600df6a0876a8bc3ba73b4455 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.microprofile.rest.client.microprofile-rest-client-api-3.0.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 
100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar 
b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ 
diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6626e43c189c86a20548341f9946a83701ba5e3 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,267 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 +eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final 
+io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-client::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 +io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-client-config::jar:3.14.1 +io.quarkus:quarkus-rest-client-jackson::jar:3.14.1 
+io.quarkus:quarkus-rest-client-jaxrs::jar:3.14.1 +io.quarkus:quarkus-rest-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-smallrye-stork::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 
+io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye.stork:stork-api::jar:2.6.1 +io.smallrye.stork:stork-core::jar:2.6.1 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 
+org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.microprofile.rest.client:microprofile-rest-client-api::jar:3.0.1 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus-run.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..9521cceb3d8f55e5b2eea1f5ff303c552a104fcb Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba19feb4aaa34f0b43e0565d81474ee17e2a89ef Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..a7b78e5d7d1e689fad2452dc2c7d5c6dad886e4a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..611af07bf6f5f779117aae564b55f1b41f69929e Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/quarkus-artifact.properties b/scorpio-broker/SubscriptionManager/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..df1ddc541e0ba361e7632de711558bfcff435b41 --- /dev/null +++ b/scorpio-broker/SubscriptionManager/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by 
Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/subscription-manager\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git a/scorpio-broker/SubscriptionManager/target/subscription-manager-5.0.5-SNAPSHOT.jar b/scorpio-broker/SubscriptionManager/target/subscription-manager-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..db5b60c993ac249965265a5c045505cbbd24dcde Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/subscription-manager-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class new file mode 100644 index 0000000000000000000000000000000000000000..2a7b59a0de2253277b777dbaac91bc8f8db6487a Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/CustomProfile.class differ diff --git a/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationControllerTest.class b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..f9cf7ece66f10141bb7b94b897b51f734ef46827 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/NotificationControllerTest.class differ diff --git a/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class new file mode 100644 index 0000000000000000000000000000000000000000..21f8ddff3fcc08349c3584e3abccb631202da3e1 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/controller/SubscriptionControllerTest.class differ diff --git a/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.class b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.class new file mode 100644 index 0000000000000000000000000000000000000000..c246c3be47ac4719d69064fcf5cbea32351c84c3 Binary files /dev/null and b/scorpio-broker/SubscriptionManager/target/test-classes/eu/neclab/ngsildbroker/subscriptionmanager/service/SubscriptionServiceTest.class differ diff --git a/scorpio-broker/at-context-server/target/at-context-server-5.0.5-SNAPSHOT.jar b/scorpio-broker/at-context-server/target/at-context-server-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..a0f8f48ee2af90520b22099a928a429682edbb12 Binary files /dev/null and b/scorpio-broker/at-context-server/target/at-context-server-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/at-context-server/target/classes/META-INF/jandex.idx b/scorpio-broker/at-context-server/target/classes/META-INF/jandex.idx new file mode 100644 index 0000000000000000000000000000000000000000..9b3590e68d5333e43bb006fb56de5e4f72f9b5ba Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/classes/META-INF/jandex.idx differ diff --git a/scorpio-broker/at-context-server/target/classes/application-eureka.properties b/scorpio-broker/at-context-server/target/classes/application-eureka.properties new file mode 100644 index 0000000000000000000000000000000000000000..01b6107ba24bcd76300f25487a7481f9bb453f96 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/application-eureka.properties @@ -0,0 +1,14 @@ +#Eureka settings +quarkus.eureka.enable=true +#quarkus.eureka.port= +#quarkus.eureka.hostname= +#quarkus.eureka.context-path= +#quarkus.eureka.prefer-ip-address= +#quarkus.eureka.name= +#quarkus.eureka.vip-address= +quarkus.eureka.home-page-url=/ +quarkus.eureka.status-page-url=/q/health/ready +quarkus.eureka.health-check-url=/q/health/live +quarkus.eureka.region=default +quarkus.eureka.prefer-same-zone=true +quarkus.eureka.service-url.default=http://localhost:8761/eureka \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/application-kafka-eureka.properties b/scorpio-broker/at-context-server/target/classes/application-kafka-eureka.properties new file mode 100644 index 0000000000000000000000000000000000000000..01b6107ba24bcd76300f25487a7481f9bb453f96 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/application-kafka-eureka.properties @@ -0,0 +1,14 @@ +#Eureka settings +quarkus.eureka.enable=true +#quarkus.eureka.port= +#quarkus.eureka.hostname= +#quarkus.eureka.context-path= +#quarkus.eureka.prefer-ip-address= +#quarkus.eureka.name= +#quarkus.eureka.vip-address= +quarkus.eureka.home-page-url=/ +quarkus.eureka.status-page-url=/q/health/ready +quarkus.eureka.health-check-url=/q/health/live +quarkus.eureka.region=default +quarkus.eureka.prefer-same-zone=true +quarkus.eureka.service-url.default=http://localhost:8761/eureka \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/application-sqs.properties b/scorpio-broker/at-context-server/target/classes/application-sqs.properties new file mode 100644 index 0000000000000000000000000000000000000000..ad3d4798a3d209bb0dddf0105e9ff5bdeba4d601 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/application-sqs.properties @@ -0,0 +1 @@ +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/application.properties b/scorpio-broker/at-context-server/target/classes/application.properties new file mode 100644 index 0000000000000000000000000000000000000000..ee66fd5b7194316e2a6f91854d05337d655415a0 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/application.properties @@ -0,0 +1,27 @@ +quarkus.application.name=at-context-server +quarkus.http.port=2023 +quarkus.log.level=INFO +quarkus.native.additional-build-args=--initialize-at-run-time=org.apache.commons.lang3.RandomStringUtils +mysettings.postgres.host=${dbhost:localhost} +mysettings.postgres.port=${dbport:5432} +mysettings.postgres.username=${dbuser:ngb} +mysettings.postgres.password=${dbpass:ngb} +mysettings.postgres.database-name=${dbname:ngb} +mysettings.gateway.host=${gateway.host:localhost} +mysettings.gateway.port=${gateway.port:9090} +jdbcurl=jdbc:postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name}?ApplicationName=${quarkus.application.name} +scorpio.gatewayurl=http://localhost:9090 
+scorpio.at-context-server=http://localhost:9090 +scorpio.directDB=true +quarkus.datasource.db-kind=postgresql +quarkus.datasource.username=${mysettings.postgres.username} +quarkus.datasource.password=${mysettings.postgres.password} +quarkus.datasource.jdbc.url=${jdbcurl} +quarkus.datasource.reactive.url=postgresql://${mysettings.postgres.host}:${mysettings.postgres.port}/${mysettings.postgres.database-name} +quarkus.datasource.reactive.shared=true +quarkus.datasource.reactive.max-size=20 +quarkus.flyway.migrate-at-start=true +quarkus.flyway.baseline-on-migrate=true +quarkus.flyway.connect-retries=10 +quarkus.flyway.repair-at-start=true +quarkus.datasource.reactive.idle-timeout=15s \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.1__entity.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.1__entity.sql new file mode 100644 index 0000000000000000000000000000000000000000..a8259f8bf603988add0925c6985b979640cdc13e --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.1__entity.sql @@ -0,0 +1,57 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS Entity ( + id TEXT NOT NULL, + type TEXT, + data JSONB NOT NULL, + context JSONB, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + observationSpace GEOMETRY(Geometry, 4326), + operationSpace GEOMETRY(Geometry, 4326), + PRIMARY KEY (id)) +; + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- is any validation needed? 
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER entity_extract_jsonb_fields BEFORE INSERT OR UPDATE ON entity + FOR EACH ROW EXECUTE PROCEDURE entity_extract_jsonb_fields(); + +-- create indexes for performance +CREATE INDEX i_entity_type ON entity (type); +CREATE INDEX i_entity_createdat ON entity (createdat); +CREATE INDEX i_entity_modifiedat ON entity (modifiedat); +CREATE INDEX i_entity_location ON entity USING GIST (location); +CREATE INDEX i_entity_observationspace ON entity USING GIST (observationspace); +CREATE INDEX i_entity_operationspace ON entity USING GIST (operationspace); + +-- to check if this index will be used by the database optimizer, or if it should be applied only for for certain keys +-- check https://www.postgresql.org/docs/current/static/datatype-json.html +CREATE INDEX i_entity_data ON entity USING GIN (data); diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.2__registry.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.2__registry.sql new file mode 100644 index 0000000000000000000000000000000000000000..51d767f1a6ee0368765817ebce039132d1160bd9 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.2__registry.sql @@ -0,0 +1,135 @@ +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS csource ( + id TEXT NOT NULL, + data JSONB NOT NULL, + type TEXT, + name TEXT, + description TEXT, + timestamp_start TIMESTAMP, + timestamp_end TIMESTAMP, + location GEOMETRY(Geometry, 4326), -- 4326 (WGS84) is the Coordinate System defined in GeoJson spec: https://tools.ietf.org/html/rfc7946#section-4 + expires TIMESTAMP, + endpoint TEXT, + internal boolean default false, + has_registrationinfo_with_attrs_only BOOL NOT NULL DEFAULT FALSE, + has_registrationinfo_with_entityinfo_only BOOL NOT NULL DEFAULT FALSE, + PRIMARY KEY (id)) +; + +-- create indexes for performance +CREATE INDEX i_csource_data ON csource USING GIN (data); +CREATE INDEX i_csource_name ON csource (name); +CREATE INDEX i_csource_timestamp_start ON csource (timestamp_start); +CREATE INDEX i_csource_timestamp_end ON csource (timestamp_end); +CREATE INDEX i_csource_location ON csource USING GIST (location); +CREATE INDEX 
i_csource_expires ON csource (expires); +CREATE INDEX i_csource_endpoint ON csource (endpoint); +CREATE INDEX i_csource_internal ON csource (internal); + +CREATE TABLE IF NOT EXISTS csourceinformation ( + id BIGSERIAL, + csource_id TEXT NOT NULL REFERENCES csource(id) ON DELETE CASCADE ON UPDATE CASCADE, + group_id BIGINT, + entity_id TEXT, + entity_idpattern TEXT, + entity_type TEXT, + property_id TEXT, + relationship_id TEXT, + PRIMARY KEY (id)) +; +CREATE SEQUENCE csourceinformation_group_id_seq OWNED BY csourceinformation.group_id; -- used by csource trigger +-- create indexes for performance +CREATE INDEX i_csourceinformation_csource_id ON csourceinformation (csource_id); +CREATE INDEX i_csourceinformation_entity_type_id_idpattern ON csourceinformation (entity_type, entity_id, entity_idpattern); +CREATE INDEX i_csourceinformation_entity_type_id ON csourceinformation (entity_type, entity_id); +CREATE INDEX i_csourceinformation_entity_type_idpattern ON csourceinformation (entity_type, entity_idpattern); +CREATE INDEX i_csourceinformation_property_id ON csourceinformation (property_id); +CREATE INDEX i_csourceinformation_relationship_id ON csourceinformation (relationship_id); +CREATE INDEX i_csourceinformation_group_property_relationship ON csourceinformation (group_id, property_id, relationship_id); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + 
+CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields_to_information_table(); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.3__temporal.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.3__temporal.sql new file mode 100644 index 0000000000000000000000000000000000000000..bc9d603a2b3ed51124507c18e77df266bb80c91a --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20190604.3__temporal.sql @@ -0,0 +1,105 @@ +BEGIN; + +CREATE EXTENSION IF NOT EXISTS postgis; + +CREATE TABLE IF NOT EXISTS temporalentity ( + id TEXT NOT NULL, + type TEXT, + createdAt TIMESTAMP, + modifiedAt TIMESTAMP, + PRIMARY KEY (id)) +; + +CREATE TABLE IF NOT EXISTS temporalentityattrinstance ( + internalid BIGSERIAL, + temporalentity_id TEXT NOT NULL REFERENCES temporalentity(id) ON DELETE CASCADE ON UPDATE CASCADE, + attributeid TEXT NOT NULL, + instanceid TEXT, + attributetype TEXT, + value TEXT, -- object (relationship) is also stored here + geovalue GEOMETRY, + createdat TIMESTAMP, + modifiedat TIMESTAMP, + observedat TIMESTAMP, + data JSONB NOT NULL, + static BOOL NOT NULL, + PRIMARY KEY (internalid)) +; +CREATE UNIQUE INDEX i_temporalentityattrinstance_entityid_attributeid_instanceid ON temporalentityattrinstance (temporalentity_id, attributeid, instanceid); + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do 
not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_extract_jsonb_fields BEFORE INSERT OR UPDATE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_update_static() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + f_count integer; + BEGIN + select into f_internalid, f_count min(internalid), count(1) from temporalentityattrinstance + where temporalentity_id = OLD.temporalentity_id AND attributeid = OLD.attributeid; + IF (f_count = 1) THEN + UPDATE temporalentityattrinstance SET static = true WHERE internalid = f_internalid; + END IF; + RETURN OLD; + END; +$_$ LANGUAGE plpgsql; + +CREATE TRIGGER temporalentityattrinstance_update_static AFTER DELETE ON temporalentityattrinstance + FOR EACH ROW EXECUTE PROCEDURE temporalentityattrinstance_update_static(); + +-- create indexes for performance + +CREATE INDEX i_temporalentity_type ON temporalentity (type); + +CREATE INDEX i_temporalentityattrinstance_data ON temporalentityattrinstance USING GIN (data); + +COMMIT; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20190611.1__sysattrs.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190611.1__sysattrs.sql new file mode 100644 index 0000000000000000000000000000000000000000..3e24ff4111eb19227e14f1b3a2b90a2d863f337e --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20190611.1__sysattrs.sql @@ -0,0 +1,7 @@ +-- entity +ALTER TABLE entity ALTER data DROP NOT NULL; +ALTER TABLE entity ADD data_without_sysattrs JSONB; + +-- csource +ALTER TABLE csource ALTER data DROP NOT NULL; +ALTER TABLE csource ADD data_without_sysattrs JSONB; diff --git 
a/scorpio-broker/at-context-server/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ab6fe3c66556cde740eaf56948e95d4556fa7c9 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20190611.2__extract_functions_optimization.sql @@ -0,0 +1,121 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on another column + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20190703.1__keyvalues.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190703.1__keyvalues.sql new file mode 100644 index 0000000000000000000000000000000000000000..fae8d021431fadf39732600f684e69f5aa43447a --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20190703.1__keyvalues.sql @@ -0,0 +1 @@ +ALTER TABLE entity ADD kvdata JSONB; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..13b2ff5ba21ce08cd58465e6b7b9240c592f6f5c --- /dev/null +++ 
b/scorpio-broker/at-context-server/target/classes/db/migration/V20190704.1__extract_functions_bugfix.sql @@ -0,0 +1,131 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM 
jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.1__tenant_function.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.1__tenant_function.sql new file mode 100644 index 0000000000000000000000000000000000000000..899626ca4ed38154b7e8344e98e1e0b41459d391 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.1__tenant_function.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF 
(NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.2__tenant_field.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.2__tenant_field.sql new file mode 100644 index 0000000000000000000000000000000000000000..6e5e7a7599f89a684574be098ed4a96d75068c1d --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.2__tenant_field.sql @@ -0,0 +1 @@ +ALTER TABLE csource ADD tenant_id TEXT; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.3__tenant_table.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.3__tenant_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..4ea65d8e5fd612f8a5f0a3cd20d9ae081aba11f1 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20210206.3__tenant_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS tenant ( + tenant_id TEXT NOT NULL, + database_name varchar(255) UNIQUE, + PRIMARY KEY (tenant_id) +); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20211217.1__subscription_table.sql 
b/scorpio-broker/at-context-server/target/classes/db/migration/V20211217.1__subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..104b878e08881a8de88364102af8b82ac5cd1a1f --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20211217.1__subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20211222.1__registry_subscription_table.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20211222.1__registry_subscription_table.sql new file mode 100644 index 0000000000000000000000000000000000000000..28f87847b253efcabcac9dc467a64ea1774766fa --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20211222.1__registry_subscription_table.sql @@ -0,0 +1,5 @@ +CREATE TABLE IF NOT EXISTS registry_subscriptions ( + subscription_id TEXT NOT NULL, + subscription_request TEXT UNIQUE, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql new file mode 100644 index 0000000000000000000000000000000000000000..b8fc302dd290e0b4a560b3b5bf0c09e5fa0a199a --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220106.1__extract_functions_update_to_ld.sql @@ -0,0 +1,163 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}')) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}')) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getGeoJson (ldjson jsonb) +RETURNS jsonb AS $geojson$ +declare + geojson jsonb; +BEGIN + SELECT json_build_object('type', substring(ldjson#>>'{@type,0}' from 32),'coordinates',getCoordinates(ldjson#>'{https://purl.org/geojson/vocab#coordinates,0,@list}')) into geojson; + RETURN geojson; +END; +$geojson$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? 
+ NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + 
l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/properties}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationships}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220124.1__scope_support.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220124.1__scope_support.sql new file mode 100644 index 0000000000000000000000000000000000000000..40f3e01afad101fbea692822b60923ab63123965 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220124.1__scope_support.sql @@ -0,0 +1,52 @@ +ALTER TABLE public.entity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT 
array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..77f733a8e2015aac5d0c1190fb0b5bbd6256fd24 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220125.1__extract_information_functions_bugfix.sql @@ -0,0 +1,96 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/location,0,@value}' ), 4326); + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION 
csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. + INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..0167acd3afc6a30007b262cef29778be77ec9089 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220125.4__extract_information_functions_bugfix.sql @@ -0,0 +1,103 @@ +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in information table +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields_to_information_table() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_group_id csourceinformation.group_id%TYPE; +BEGIN + + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where csource_id = NEW.id; + END IF; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + -- RAISE NOTICE '%', rec.value; + SELECT nextval('csourceinformation_group_id_seq') INTO l_group_id; + + -- id takes precedence over idPattern. so, only store idPattern if id is empty. this makes queries much easier/faster. 
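+ -- Editor's illustration (comment only; the identifiers below are hypothetical, not part of the migration):
+ -- for an entities entry such as {"@id": "urn:ngsi-ld:Vehicle:A", "https://uri.etsi.org/ngsi-ld/idPattern": [{"@value": "urn:.*"}]},
+ -- the INSERT below stores entity_id = 'urn:ngsi-ld:Vehicle:A' and leaves entity_idpattern NULL,
+ -- because the CASE expression only falls back to idPattern when @id is absent.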
+ INSERT INTO csourceinformation (csource_id, group_id, entity_id, entity_type, entity_idpattern) + SELECT NEW.id, + l_group_id, + value#>>'{@id}', + value#>>'{@type,0}', + CASE WHEN value#>>'{@id}' IS NULL THEN value#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}' END + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}'); + + INSERT INTO csourceinformation (csource_id, group_id, property_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}'); + + INSERT INTO csourceinformation (csource_id, group_id, relationship_id) + SELECT NEW.id, + l_group_id, + value#>>'{@id}' + FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}'); + + END LOOP; + + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220126.1__scope_support_2.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220126.1__scope_support_2.sql new file mode 100644 index 0000000000000000000000000000000000000000..6f7224edef85a212c0e339117292b2fbd78307e1 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220126.1__scope_support_2.sql @@ -0,0 +1,128 @@ +ALTER TABLE public.csource + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +ALTER TABLE public.temporalentity + ADD COLUMN scopes text[] COLLATE pg_catalog."default"; +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}') ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220127.1__scope_support_3.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220127.1__scope_support_3.sql new file mode 100644 index 0000000000000000000000000000000000000000..aef923126f490e1683b02763d8cb70eb7f971c26 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220127.1__scope_support_3.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.attributetype = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( NEW.data#>>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}' ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..a27bbc3ad1a40b4e5e7ad176746076c6cace0d70 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220131.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 
'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql new file mode 100644 index 0000000000000000000000000000000000000000..7710a0ee88d8dfd878acef4b862d42c051bb0d56 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220131.2__bugfix_getCoordinates.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION getCoordinates (coordinateList jsonb) +RETURNS jsonb AS $coordinates$ +declare + coordinates jsonb := '[]'; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(coordinateList) + LOOP + IF i ? '@list' THEN + SELECT jsonb_insert(coordinates, '{-1}', getCoordinates(i#>'{@list}'), true) into coordinates; + ELSE + SELECT jsonb_insert(coordinates,'{-1}', (i#>'{@value}'), true) into coordinates; + END IF; + END LOOP; + RETURN coordinates; +END; +$coordinates$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql new file mode 100644 index 0000000000000000000000000000000000000000..6b5247225608c9e0224d3e823dcfa651b14cdfb0 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220204.2__bugfix_getScopes.sql @@ -0,0 +1,13 @@ +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) + LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql new file mode 100644 index 0000000000000000000000000000000000000000..64998eb0a070a7e846fb27e46173897875035395 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220207.1__bugfix_matchScope.sql @@ -0,0 +1,17 @@ +CREATE OR REPLACE FUNCTION matchScope (scopes text[], scopeQuery text) +RETURNS boolean AS $result$ +declare + i text; +BEGIN + IF scopes IS NULL THEN + return false; + END IF; + FOREACH i IN ARRAY scopes + LOOP + IF i ~ scopeQuery THEN + return true; + END IF; + END LOOP; + RETURN false; +END; +$result$ LANGUAGE plpgsql; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220215.1__postgis2.4compat.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220215.1__postgis2.4compat.sql new file mode 100644 index 
0000000000000000000000000000000000000000..3fcb41a0d6a8461a015ac825c6a21ec9af3476e9 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220215.1__postgis2.4compat.sql @@ -0,0 +1,150 @@ +CREATE OR REPLACE FUNCTION entity_extract_jsonb_fields() RETURNS trigger AS $_$ + BEGIN + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + -- is any validation needed? + NEW.type = NEW.data#>>'{@type,0}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/observationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] }] }') THEN + NEW.observationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/observationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.observationSpace = NULL; + END IF; + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/operationSpace": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.operationSpace = ST_SetSRID( ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/operationSpace,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.operationSpace = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + + RETURN NEW; + END; + +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = TRUE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + + + +-- trigger to automatically extract pre-defined ngsi-ld members and store them in regular fields (for query performance) +CREATE OR REPLACE FUNCTION csource_extract_jsonb_fields() RETURNS trigger AS $_$ +DECLARE + l_rec RECORD; + l_entityinfo_count INTEGER; + l_attributeinfo_count INTEGER; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NULL AND NEW.data IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.data IS NOT NULL AND NEW.data IS NOT NULL AND OLD.data <> NEW.data) THEN + NEW.type = NEW.data#>>'{@type,0}'; + NEW.tenant_id = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + NEW.name = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/name,0,@value}'; + NEW.description = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/description,0,@value}'; + NEW.timestamp_start = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/start,0,@value}')::TIMESTAMP; + NEW.timestamp_end = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/timestamp,0,https://uri.etsi.org/ngsi-ld/end,0,@value}')::TIMESTAMP; + IF NEW.data ? 
'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.data@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + NEW.location = null; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + NEW.scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + NEW.expires = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + NEW.endpoint = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + NEW.internal = COALESCE((NEW.data#>>'{https://uri.etsi.org/ngsi-ld/internal,0,@value}')::BOOLEAN, FALSE); + + NEW.has_registrationinfo_with_attrs_only = false; + NEW.has_registrationinfo_with_entityinfo_only = false; + + FOR l_rec IN SELECT value FROM jsonb_array_elements(NEW.data#>'{https://uri.etsi.org/ngsi-ld/information}') + LOOP + SELECT INTO l_entityinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/entities}' ); + SELECT INTO l_attributeinfo_count count(*) FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/propertyNames}' ); + SELECT INTO l_attributeinfo_count count(*)+l_attributeinfo_count FROM jsonb_array_elements( l_rec.value#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}' ); + + IF (NEW.has_registrationinfo_with_attrs_only = false) THEN + NEW.has_registrationinfo_with_attrs_only = (l_entityinfo_count = 0 AND l_attributeinfo_count > 0); + END IF; + + IF (NEW.has_registrationinfo_with_entityinfo_only = false) THEN + NEW.has_registrationinfo_with_entityinfo_only = (l_entityinfo_count > 0 AND l_attributeinfo_count = 0); + END IF; + END LOOP; + END IF; + + RETURN NEW; +END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql new file mode 100644 index 0000000000000000000000000000000000000000..36f137d1768dfa06191276d5fbb6cdf1319b1ef6 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20220311.1__bugfix_temporalfunction.sql @@ -0,0 +1,50 @@ +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.attributetype = NEW.data#>>'{@type,0}'; + + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + + IF NEW.attributeid IN ('https://uri.etsi.org/ngsi-ld/createdAt', 'https://uri.etsi.org/ngsi-ld/modifiedAt', 'https://uri.etsi.org/ngsi-ld/observedAt') THEN + NEW.value = NEW.data#>'{@value}'; + ELSE + IF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasValue') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0,@value}'; -- TODO: confirm if #> or #>> + ELSIF (NEW.data?'https://uri.etsi.org/ngsi-ld/hasObject') THEN + NEW.value = NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasObject,0,@id}'; + ELSE + NEW.value = NULL; + END IF; + END IF; + + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF (NEW.data ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + UPDATE temporalentity SET scopes = getScopes(NEW.data#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}') WHERE id = NEW.temporalentity_id; + END IF; + END IF; + + IF TG_OP = 'INSERT' THEN + select into f_internalid internalid from temporalentityattrinstance + where temporalentity_id = NEW.temporalentity_id and attributeid = NEW.attributeid limit 1; + IF FOUND THEN + NEW.static = FALSE; + UPDATE temporalentityattrinstance SET static = false + WHERE internalid = f_internalid and static = true; + ELSE + NEW.static = FALSE; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20221122.1__move161.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20221122.1__move161.sql new file mode 100644 index 0000000000000000000000000000000000000000..2bfd6cf469984dc77c1e20130833088fd0b3423d --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20221122.1__move161.sql @@ -0,0 +1,554 @@ +DROP TABLE csourceinformation; + +Alter table public.csource DROP COLUMN "location",DROP COLUMN "name", DROP COLUMN endpoint,DROP COLUMN description,DROP COLUMN timestamp_end,DROP COLUMN timestamp_start,DROP COLUMN tenant_id,DROP COLUMN internal,DROP COLUMN has_registrationinfo_with_attrs_only,DROP COLUMN has_registrationinfo_with_entityinfo_only,DROP COLUMN data_without_sysattrs,DROP COLUMN scopes, DROP COLUMN expires, DROP COLUMN type; + +ALTER TABLE PUBLIC.CSOURCE RENAME COLUMN data TO REG; + +alter table public.csource rename column id to c_id; + +ALTER TABLE PUBLIC.CSOURCE DROP CONSTRAINT csource_pkey; + +ALTER TABLE IF EXISTS public.csource + ADD CONSTRAINT unique_c_id UNIQUE (c_id); + +ALTER TABLE IF EXISTS public.csource + ADD COLUMN id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ); + +ALTER TABLE public.csource ADD PRIMARY KEY (id); + +CREATE INDEX i_csource_c_id + ON public.csource USING hash + (c_id text_pattern_ops); + +CREATE INDEX i_csource_id + ON public.csource USING btree + (id); + + +CREATE TABLE public.csourceinformation( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY ( INCREMENT 1 START 1 ), + cs_id bigint, + c_id text, + e_id text, + e_id_p text, + e_type text, + 
e_prop text, + e_rel text, + i_location GEOMETRY(Geometry, 4326), + scopes text[], + expires timestamp without time zone, + endpoint text, + tenant_id text, + headers jsonb, + reg_mode smallint, + createEntity boolean, + updateEntity boolean, + appendAttrs boolean, + updateAttrs boolean, + deleteAttrs boolean, + deleteEntity boolean, + createBatch boolean, + upsertBatch boolean, + updateBatch boolean, + deleteBatch boolean, + upsertTemporal boolean, + appendAttrsTemporal boolean, + deleteAttrsTemporal boolean, + updateAttrsTemporal boolean, + deleteAttrInstanceTemporal boolean, + deleteTemporal boolean, + mergeEntity boolean, + replaceEntity boolean, + replaceAttrs boolean, + mergeBatch boolean, + retrieveEntity boolean, + queryEntity boolean, + queryBatch boolean, + retrieveTemporal boolean, + queryTemporal boolean, + retrieveEntityTypes boolean, + retrieveEntityTypeDetails boolean, + retrieveEntityTypeInfo boolean, + retrieveAttrTypes boolean, + retrieveAttrTypeDetails boolean, + retrieveAttrTypeInfo boolean, + createSubscription boolean, + updateSubscription boolean, + retrieveSubscription boolean, + querySubscription boolean, + deleteSubscription boolean, + entityMap boolean, + canCompress boolean, + CONSTRAINT id_pkey PRIMARY KEY (id), + CONSTRAINT cs_id_fkey FOREIGN KEY (cs_id) + REFERENCES public.csource (id) MATCH SIMPLE + ON UPDATE CASCADE + ON DELETE CASCADE +); + + +CREATE INDEX IF NOT EXISTS fki_cs_id_fkey + ON public.csourceinformation(cs_id); + +CREATE INDEX i_csourceinformation_e_type + ON public.csourceinformation USING hash + (e_type text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_rel + ON public.csourceinformation USING hash + (e_rel text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_prop + ON public.csourceinformation USING hash + (e_prop text_pattern_ops); + +CREATE INDEX i_csourceinformation_e_id + ON public.csourceinformation USING hash + (e_id text_pattern_ops); + +CREATE INDEX i_csourceinformation_i_location + ON public.csourceinformation USING gist + (i_location gist_geometry_ops_2d); + +DROP FUNCTION public.csource_extract_jsonb_fields_to_information_table cascade; +DROP Trigger csource_extract_jsonb_fields ON csource; + +CREATE TABLE temp ( + c_id text, + reg jsonb +); +INSERT INTO temp SELECT c_id, reg FROM csource; + +DELETE FROM csource; + +INSERT INTO csource SELECT c_id, reg FROM temp; + +drop table temp; + +ALTER TABLE PUBLIC.ENTITY RENAME COLUMN DATA TO ENTITY; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN DATA_WITHOUT_SYSATTRS; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN KVDATA; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OBSERVATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN OPERATIONSPACE; + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN CONTEXT; + +ALTER TABLE PUBLIC.ENTITY ADD COLUMN E_TYPES TEXT[]; + +CREATE INDEX "I_entity_scopes" + ON public.entity USING gin + (scopes array_ops); + +CREATE INDEX "I_entity_types" + ON public.entity USING gin + (e_types array_ops); + +CREATE OR REPLACE FUNCTION public.entity_extract_jsonb_fields() RETURNS trigger LANGUAGE plpgsql AS $function$ + BEGIN + + -- do not reprocess if it is just an update on another column + IF (TG_OP = 'INSERT' AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NULL AND NEW.ENTITY IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.ENTITY IS NOT NULL AND NEW.ENTITY IS NOT NULL AND OLD.ENTITY <> NEW.ENTITY) THEN + NEW.createdat = 
(NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.ENTITY#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + IF (NEW.ENTITY@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + NEW.location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.location = NULL; + END IF; + IF (NEW.ENTITY ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + NEW.scopes = getScopes(NEW.ENTITY#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSE + NEW.scopes = NULL; + END IF; + END IF; + RETURN NEW; + END; +$function$; + +UPDATE ENTITY SET E_TYPES=array_append(E_TYPES,TYPE); + +ALTER TABLE PUBLIC.ENTITY DROP COLUMN type; + + +CREATE OR REPLACE FUNCTION CSOURCE_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE +BEGIN + NEW.C_ID = NEW.REG#>>'{@id}'; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields BEFORE INSERT ON csource + FOR EACH ROW EXECUTE PROCEDURE csource_extract_jsonb_fields(); + +CREATE OR REPLACE FUNCTION CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS() RETURNS TRIGGER AS $_$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,false,false]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, 
deleteSubscription, entityMap, canCompress) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, entityMap, canCompress) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$_$ LANGUAGE PLPGSQL; + +CREATE TRIGGER csource_extract_jsonb_fields_to_information_table AFTER INSERT OR UPDATE ON csource + FOR EACH ROW EXECUTE PROCEDURE CSOURCEINFORMATION_EXTRACT_JSONB_FIELDS(); + +CREATE OR REPLACE FUNCTION GETMODE (MODETEXT text) RETURNS smallint AS $registry_mode$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$registry_mode$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + 
operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; + + + +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + RETURN jsonb_set(ENTITY,ARRAY[attribName], tmp); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + RETURN ENTITY - attribName; + END IF; 
+END; +$ENTITYPD$ LANGUAGE PLPGSQL; + + diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230108.1__subscription161.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230108.1__subscription161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c8115353d5ba16497cc30b10ef8a1fe6e0915041 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230108.1__subscription161.sql @@ -0,0 +1,18 @@ +DROP TABLE subscriptions; +DROP TABLE registry_subscriptions; + +CREATE TABLE public.subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); + +CREATE TABLE public.registry_subscriptions +( + subscription_id text, + subscription jsonb, + context text, + PRIMARY KEY (subscription_id) +); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230212.1__context.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230212.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..665c49dd33b0c8c5bfea4e2361c29df16fd01e7d --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230212.1__context.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS public.contexts +( + id text NOT NULL, + body jsonb NOT NULL, + kind text NOT NULL, + createdat timestamp without time zone, + PRIMARY KEY (id) +); +ALTER TABLE public.contexts alter createdat set default now(); diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230220.1__batchops161.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230220.1__batchops161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c31264330e2d38c953e892ff29b43295aedfc5ea --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230220.1__batchops161.sql @@ -0,0 +1,99 @@ +CREATE OR REPLACE FUNCTION NGSILD_CREATEBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOCR$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj['success'] = resultObj['success'] || (entity->'@id')::jsonb; + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOCR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_agg(entityId); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL 
SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || (newentity->'@id')::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230309.1__datamigration161.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230309.1__datamigration161.sql new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230311.1__temporal161.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230311.1__temporal161.sql new file mode 100644 index 0000000000000000000000000000000000000000..c502a34416bf47b00231f8be37f6dba50a7c0c55 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230311.1__temporal161.sql @@ -0,0 +1,65 @@ +ALTER TABLE PUBLIC.temporalentity ADD COLUMN E_TYPES TEXT[]; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN VALUE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN attributetype; +CREATE INDEX "I_temporalentity_types" + ON public.temporalentity USING gin + (e_types array_ops); +UPDATE temporalentity SET E_TYPES=array_append(E_TYPES,TYPE); +ALTER TABLE PUBLIC.temporalentity DROP COLUMN type; +ALTER TABLE PUBLIC.temporalentity ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; 
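(Illustrative aside, not part of the migration file above; the row values are hypothetical.) The UPDATE/DROP pair just above converts the old single-valued TYPE column of temporalentity into the new E_TYPES text[] array before the column is removed:
-- Before: temporalentity(id, type)    = ('urn:ngsi-ld:Vehicle:A1', 'https://uri.etsi.org/ngsi-ld/default-context/Vehicle')
-- After:  temporalentity(id, e_types) = ('urn:ngsi-ld:Vehicle:A1', '{https://uri.etsi.org/ngsi-ld/default-context/Vehicle}')
-- because E_TYPES is still NULL when the UPDATE runs, and array_append(NULL::text[], TYPE) yields a one-element array.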
+ALTER TABLE PUBLIC.temporalentityattrinstance ADD COLUMN DELETEDAT TIMESTAMP WITHOUT TIME ZONE; +ALTER TABLE PUBLIC.temporalentityattrinstance DROP COLUMN static; +CREATE OR REPLACE FUNCTION temporalentityattrinstance_extract_jsonb_fields() RETURNS trigger AS $_$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + END IF; + + RETURN NEW; + END; +$_$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopeEntry (scopeList text[]) +RETURNS jsonb AS $scopes$ +declare + scopes jsonb; + i text; +BEGIN + scopes := '[]'::jsonb; + FOREACH i IN ARRAY scopeList LOOP + scopes = scopes || jsonb_build_object('@value', i); + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE OR REPLACE FUNCTION getScopes (scopeList jsonb) +RETURNS text[] AS $scopes$ +declare + scopes text[]; + i jsonb; +BEGIN + if scopeList is null THEN + RETURN null; + END IF; + FOR i IN SELECT jsonb_array_elements FROM jsonb_array_elements(scopeList) LOOP + SELECT array_append(scopes, (i#>>'{@value}')::text) into scopes; + END LOOP; + RETURN scopes; +END; +$scopes$ LANGUAGE plpgsql; + +CREATE INDEX i_temporalentityattrinstance_attribname + ON public.temporalentityattrinstance USING hash + (attributeid text_ops); +CREATE INDEX i_temporalentity_location ON public.temporalentityattrinstance USING GIST (geovalue); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230410.1__entitymap.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230410.1__entitymap.sql new file mode 100644 index 0000000000000000000000000000000000000000..92b172eb27cbfb372bfc729a44b1009b3946e4d5 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230410.1__entitymap.sql @@ -0,0 +1,19 @@ +CREATE TABLE public.entitymap +( + "q_token" text NOT NULL, + "entity_id" text, + "remote_hosts" jsonb, + "order_field" numeric NOT NULL +); + +CREATE INDEX i_entitymap_qtoken + ON public.entitymap USING hash + ("q_token" text_pattern_ops) +; + +CREATE TABLE public.entitymap_management +( + q_token text NOT NULL, + last_access timestamp with time zone NOT NULL, + PRIMARY KEY (q_token) +); diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230623.1__merge_patch.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230623.1__merge_patch.sql new file mode 100644 index 0000000000000000000000000000000000000000..684f327524131fa450d4e3deba24b4ab762ed4db --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230623.1__merge_patch.sql @@ -0,0 +1,36 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from 
entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +if merged_json::text like '%"urn:ngsi-ld:null"%' THEN +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +end if; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..29a8a59a3c89cdad8b22af1254310c3d3f88c4c9 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230627.1__batchops161upsertfix.sql @@ -0,0 +1,29 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity); + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id'; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + EXCEPTION WHEN unique_violation THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id'; + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object((newentity->>'@id'), updated); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_object_agg(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230705.1__core_context_store.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230705.1__core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..66bf42339d3705b05931f4a532703aa74769dc73 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230705.1__core_context_store.sql @@ -0,0 +1,300 @@ +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": 
"@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": 
"ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } +} +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230711.1__getoperations_grouping.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230711.1__getoperations_grouping.sql new file mode 100644 index 0000000000000000000000000000000000000000..af7e046119aac14e17ee33dc1cc6a074d723977c --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230711.1__getoperations_grouping.sql @@ -0,0 +1,128 @@ +CREATE OR REPLACE FUNCTION GETOPERATIONS (OPERATIONJSON JSONB) RETURNS boolean[] AS $operations$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = 
true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'entityMap' THEN + operations[37] = true; + WHEN 'canCompress' THEN + operations[38] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$operations$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230726.1__fixsubs.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230726.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..4520fbc02736783525f5e80a3980b023ce99263c --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230726.1__fixsubs.sql @@ -0,0 +1 @@ +update subscriptions set subscription=subscription-'https://uri.etsi.org/ngsi-ld/lastFailure ' \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230810.1__historyup.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230810.1__historyup.sql new file mode 100644 index 0000000000000000000000000000000000000000..06402b2bf88db1ca416edda068dc0dee6706574d --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230810.1__historyup.sql @@ -0,0 +1,39 @@ +ALTER TABLE IF EXISTS public.temporalentityattrinstance + ADD COLUMN IF NOT EXISTS 
location geometry; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_location + ON public.temporalentityattrinstance USING gist + (location) + WITH (buffering=auto) +; +CREATE INDEX IF NOT EXISTS i_temporalentityattrinstance_entityid + ON public.temporalentityattrinstance USING hash + (temporalentity_id) +; +with x as (SELECT distinct temporalentity_id as eid, geovalue, modifiedat as mat, observedat as oat, COALESCE(modifiedat, observedat) FROM temporalentityattrinstance WHERE geovalue is not null ORDER BY COALESCE(modifiedat, observedat)) UPDATE temporalentityattrinstance SET location = (SELECT x.geovalue FROM x WHERE eid = temporalentity_id and COALESCE(x.mat, x.oat) <= COALESCE(modifiedat, observedat) ORDER BY COALESCE(modifiedat, observedat) DESC limit 1) WHERE location is not null; + +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ + diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql new file mode 100644 index 0000000000000000000000000000000000000000..a17d3b8879ba7f194546f3f3ace5f41e42e9a2ec --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230811.1__bugfix_mergepatch.sql @@ -0,0 +1,52 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + RETURN previous_entity; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230822.1__bugfix_createdat.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230822.1__bugfix_createdat.sql new file mode 100644 index 0000000000000000000000000000000000000000..82cac5034c11506304e8109eb2aa122cd408b952 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230822.1__bugfix_createdat.sql @@ -0,0 +1,56 @@ +CREATE OR REPLACE FUNCTION NGSILD_PARTIALUPDATE(ENTITY jsonb, attribName text, attribValues jsonb) RETURNS jsonb AS $ENTITYPU$ +declare + tmp jsonb; + datasetId text; + insertDatasetId text; + originalEntry jsonb; + insertEntry jsonb; + inUpdate boolean; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + inUpdate := False; + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + FOR insertEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(attribValues) LOOP + insertDatasetId := insertEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF (insertDatasetId is null and datasetId is null)or (insertDatasetId is not null and datasetId is not null and insertDatasetId = datasetId) THEN + inUpdate = true; + EXIT; + END IF; + END LOOP; + IF NOT inUpdate THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + tmp := tmp || attribValues; + IF not attribValues ? 
'https://uri.etsi.org/ngsi-ld/modifiedAt' THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + tmp := jsonb_set(tmp,Array['0','https://uri.etsi.org/ngsi-ld/modifiedAt'], Entity->'https://uri.etsi.org/ngsi-ld/modifiedAt',true); + END IF; + RETURN jsonb_set(Entity,Array[attribName,'0'], (Entity->attribName->0) || (tmp->0),true); +END; +$ENTITYPU$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEATTRIB(ENTITY jsonb, attribName text, deleteDatasetId text) RETURNS jsonb AS $ENTITYPD$ +declare + tmp jsonb; + datasetId text; + originalEntry jsonb; +BEGIN + tmp := '[]'::jsonb; + FOR originalEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITY->attribName) LOOP + datasetId := originalEntry #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + IF NOT ((deleteDatasetId is null and datasetId is null)or (deleteDatasetId is not null and datasetId is not null and deleteDatasetId = datasetId)) THEN + tmp := tmp || originalEntry; + END IF; + END LOOP; + IF jsonb_array_length(tmp) > 0 THEN + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN jsonb_set(ENTITY,'{attribName}', tmp); + ELSE + Entity := jsonb_set(Entity,Array['https://uri.etsi.org/ngsi-ld/modifiedAt','0'],jsonb_build_object('@type', 'https://uri.etsi.org/ngsi-ld/DateTime','@value', to_char(timezone('utc', now()), 'YYYY-MM-DD"T"HH24:MI:SS') || 'Z')); + RETURN ENTITY - attribName; + END IF; +END; +$ENTITYPD$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230901.1__update_core_context_store.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230901.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..833426b43969a0c3842988b8d0631e776f23cbd0 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230901.1__update_core_context_store.sql @@ -0,0 +1,314 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "LineString": "geojson:LineString", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": 
"ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabularyProperty": "ngsi-ld:VocabularyProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "attributeCount", + "attributeDetails": "attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": 
"ngsi-ld:hasObject", + "@type": "@id" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20230904.1__fixsubs.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20230904.1__fixsubs.sql new file mode 100644 index 0000000000000000000000000000000000000000..02ca66926497a6b82e4bcf2d39ad6a5e9ec38489 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20230904.1__fixsubs.sql @@ 
-0,0 +1 @@ +UPDATE SUBSCRIPTIONS SET SUBSCRIPTION=JSONB_SET(SUBSCRIPTION, '{@id}', ('"'||SUBSCRIPTION_ID||'"')::jsonb, true); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql new file mode 100644 index 0000000000000000000000000000000000000000..a09bbd49ecbaa11601b43f09a7d630fcbcaf446b --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20231015.1__batchopsprevvaluesupport.sql @@ -0,0 +1,96 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? '@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; + + +CREATE OR REPLACE FUNCTION NGSILD_DELETEBATCH(ENTITY_IDS jsonb) RETURNS jsonb AS $ENTITYODR$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, 'Not Found'); + else + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', 
entityId, 'old', prev_entity); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(entityId, SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYODR$ LANGUAGE PLPGSQL PARALLEL SAFE; + +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb) RETURNS jsonb AS $ENTITYOAR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF newentity ? '@type' THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + if NOT FOUND THEN resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + else resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + EXCEPTION WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOAR$ LANGUAGE PLPGSQL; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql new file mode 100644 index 0000000000000000000000000000000000000000..5088d096c22fe1aa5e8b82aa5391b25dbd76a0e3 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20231015.2__mergepatch_preveandcurrentvalue.sql @@ -0,0 +1,57 @@ +DROP FUNCTION merge_json(text,jsonb); + +CREATE OR REPLACE FUNCTION MERGE_JSON(a text, b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; +BEGIN + merged_json := (Select entity from entity where id =a); + previous_entity := (Select entity from entity where id =a); + -- Iterate through keys in JSON B + FOR key, value IN SELECT * FROM JSONB_EACH(b) + LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT + THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + raise notice '%', 'update'; + value2 := (value->0)::jsonb ; + IF jsonb_typeof(value2) = 'object' THEN + value2 :=value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + end if; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' then + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + end if; + ELSE + -- Add the key-value pair + raise notice '%', 'add'; + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + + END LOOP; +merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); +merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; +while merged_json::text like '%[]%' + or merged_json::text like '%{}%' + or merged_json::text like '%null%' loop +merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); +merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); +end loop; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; +ret := jsonb_build_array(previous_entity, merged_json); + + RETURN ret; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20231024.1__tempattrsfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20231024.1__tempattrsfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..795a2f213be016348be3eebc8c31bcd77c9f3a8f --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20231024.1__tempattrsfix.sql @@ -0,0 +1,25 @@ +CREATE OR REPLACE FUNCTION public.temporalentityattrinstance_extract_jsonb_fields() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ + DECLARE + f_internalid temporalentityattrinstance.internalid%TYPE; + BEGIN + IF TG_OP = 'INSERT' OR NEW.data <> OLD.data THEN -- do not reprocess if it is just an update on other column (e.g. 
static) + NEW.instanceid = NEW.data#>>'{https://uri.etsi.org/ngsi-ld/instanceId,0,@id}'; + NEW.createdat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/createdAt,0,@value}')::TIMESTAMP; + NEW.modifiedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/modifiedAt,0,@value}')::TIMESTAMP; + NEW.observedat = (NEW.data#>>'{https://uri.etsi.org/ngsi-ld/observedAt,0,@value}')::TIMESTAMP; + IF NEW.data#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/GeoProperty' THEN + NEW.geovalue = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.data#>'{https://uri.etsi.org/ngsi-ld/hasValue,0}') ), 4326); + ELSE + NEW.geovalue = NULL; + END IF; + IF NEW.location is NULL THEN + SELECT teai.location INTO NEW.location FROM temporalentityattrinstance teai WHERE teai.internalid = new.internalid and COALESCE(teai.modifiedat, teai.observedat) <= COALESCE(NEW.modifiedat, NEW.observedat) ORDER BY COALESCE(teai.modifiedat, teai.observedat) LIMIT 1; + END IF; + END IF; + + RETURN NEW; + END; +$function$ \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20231030__batchops_temp_fix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20231030__batchops_temp_fix.sql new file mode 100644 index 0000000000000000000000000000000000000000..a7437255d864ad92561c657c4e23a22cb4d951b5 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20231030__batchops_temp_fix.sql @@ -0,0 +1,75 @@ +CREATE OR REPLACE FUNCTION NGSILD_APPENDBATCH(ENTITIES jsonb, NOOVERWRITE boolean) RETURNS jsonb AS $ENTITYOAR$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? 
'@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Overwriting'); + ELSIF NOT FOUND THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', 'Not Found'); + ELSE + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb; + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + + RETURN resultObj; +END; +$ENTITYOAR$ +LANGUAGE PLPGSQL; + + +ALTER TABLE temporalentityattrinstance ADD COLUMN IF NOT EXISTS static boolean \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20231128.1__upsertfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20231128.1__upsertfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..573c77b1b3701ed5532925bada113667267c7dbe --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20231128.1__upsertfix.sql @@ -0,0 +1,44 @@ +CREATE OR REPLACE FUNCTION NGSILD_UPSERTBATCH(ENTITIES jsonb, DO_REPLACE boolean ) RETURNS jsonb AS $ENTITYOUSR$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity); + WHEN OTHERS THEN + resultObj['failure'] = resultObj['failure'] || jsonb_build_object(newentity->>'@id', SQLSTATE); + END; + END LOOP; + RETURN resultObj; +END; +$ENTITYOUSR$ LANGUAGE PLPGSQL; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20231201.1__update_core_context_store.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20231201.1__update_core_context_store.sql new file mode 100644 index 0000000000000000000000000000000000000000..017016b3606fcb09d107b10217acec17bb799c2d --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20231201.1__update_core_context_store.sql @@ -0,0 +1,363 @@ +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + 
"ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", + "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": 
"ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": "ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": 
"ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + "values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'hosted'); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240212.1__merge_batchops.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240212.1__merge_batchops.sql new file mode 100644 index 0000000000000000000000000000000000000000..c5da5b65a9b6a9189123871366d0d474a238c250 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240212.1__merge_batchops.sql @@ -0,0 +1,66 @@ +CREATE OR REPLACE FUNCTION MERGE_JSON_BATCH(b JSONB) +RETURNS JSONB AS $$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj['success'] = resultObj['success'] || jsonb_build_object('id',newentity->'@id')::jsonb; + ELSE + resultObj['failure'] := resultObj['failure'] || jsonb_object_agg(newentity->'@id'->>0, 'Not Found'); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 
'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$$ LANGUAGE plpgsql; diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240319.1__context.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240319.1__context.sql new file mode 100644 index 0000000000000000000000000000000000000000..38ae052ffe9a214504c3912b7b5e6c1a92b17308 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240319.1__context.sql @@ -0,0 +1,365 @@ +ALTER TABLE public.contexts add column lastUsage timestamp without time zone, add column numberOfHits bigint default 0; + +DELETE FROM public.contexts WHERE id = ')$%^&'; +INSERT INTO public.contexts (id, body, kind) values(')$%^&', '{ + "@context": { + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + "geojson": "https://purl.org/geojson/vocab#", + "id": "@id", + "type": "@type", + "Attribute": "ngsi-ld:Attribute", + "AttributeList": "ngsi-ld:AttributeList", + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + "Date": "ngsi-ld:Date", + "DateTime": "ngsi-ld:DateTime", + "EntityType": "ngsi-ld:EntityType", + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + "EntityTypeList": "ngsi-ld:EntityTypeList", + "Feature": "geojson:Feature", + "FeatureCollection": "geojson:FeatureCollection", + "GeoProperty": "ngsi-ld:GeoProperty", + "GeometryCollection": "geojson:GeometryCollection", + "JsonProperty": "ngsi-ld:JsonProperty", + "LanguageProperty": "ngsi-ld:LanguageProperty", + "LineString": "geojson:LineString", + "ListProperty": "ngsi-ld:ListProperty", + "ListRelationship": "ngsi-ld:ListRelationship", + "MultiLineString": "geojson:MultiLineString", + "MultiPoint": "geojson:MultiPoint", + "MultiPolygon": "geojson:MultiPolygon", + "Notification": "ngsi-ld:Notification", + "Point": "geojson:Point", + "Polygon": "geojson:Polygon", + "Property": "ngsi-ld:Property", + "Relationship": "ngsi-ld:Relationship", + "Subscription": "ngsi-ld:Subscription", + "TemporalProperty": "ngsi-ld:TemporalProperty", + "Time": "ngsi-ld:Time", 
+ "VocabProperty": "ngsi-ld:VocabProperty", + "accept": "ngsi-ld:accept", + "attributeCount": "ngsi-ld:attributeCount", + "attributeDetails": "ngsi-ld:attributeDetails", + "attributeList": { + "@id": "ngsi-ld:attributeList", + "@type": "@vocab" + }, + "attributeName": { + "@id": "ngsi-ld:attributeName", + "@type": "@vocab" + }, + "attributeNames": { + "@id": "ngsi-ld:attributeNames", + "@type": "@vocab" + }, + "attributeTypes": { + "@id": "ngsi-ld:attributeTypes", + "@type": "@vocab" + }, + "attributes": { + "@id": "ngsi-ld:attributes", + "@type": "@vocab" + }, + "attrs": "ngsi-ld:attrs", + "avg": { + "@id": "ngsi-ld:avg", + "@container": "@list" + }, + "bbox": { + "@container": "@list", + "@id": "geojson:bbox" + }, + "cacheDuration": "ngsi-ld:cacheDuration", + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + "cooldown": "ngsi-ld:cooldown", + "coordinates": { + "@container": "@list", + "@id": "geojson:coordinates" + }, + "createdAt": { + "@id": "ngsi-ld:createdAt", + "@type": "DateTime" + }, + "csf": "ngsi-ld:csf", + "data": "ngsi-ld:data", + "dataset": { + "@id": "ngsi-ld:hasDataset", + "@container": "@index" + }, + "datasetId": { + "@id": "ngsi-ld:datasetId", + "@type": "@id" + }, + "deletedAt": { + "@id": "ngsi-ld:deletedAt", + "@type": "DateTime" + }, + "description": "http://purl.org/dc/terms/description", + "detail": "ngsi-ld:detail", + "distinctCount": { + "@id": "ngsi-ld:distinctCount", + "@container": "@list" + }, + "endAt": { + "@id": "ngsi-ld:endAt", + "@type": "DateTime" + }, + "endTimeAt": { + "@id": "ngsi-ld:endTimeAt", + "@type": "DateTime" + }, + "endpoint": "ngsi-ld:endpoint", + "entities": "ngsi-ld:entities", + "entity": "ngsi-ld:entity", + "entityCount": "ngsi-ld:entityCount", + "entityId": { + "@id": "ngsi-ld:entityId", + "@type": "@id" + }, + "entityList": { + "@id": "ngsi-ld:entityList", + "@container": "@list" + }, + "error": "ngsi-ld:error", + "errors": "ngsi-ld:errors", + "expiresAt": { + "@id": "ngsi-ld:expiresAt", + "@type": "DateTime" + }, + "features": { + "@container": "@set", + "@id": "geojson:features" + }, + "format": "ngsi-ld:format", + "geoQ": "ngsi-ld:geoQ", + "geometry": "geojson:geometry", + "geoproperty": "ngsi-ld:geoproperty", + "georel": "ngsi-ld:georel", + "idPattern": "ngsi-ld:idPattern", + "information": "ngsi-ld:information", + "instanceId": { + "@id": "ngsi-ld:instanceId", + "@type": "@id" + }, + "isActive": "ngsi-ld:isActive", + "json": { + "@id": "ngsi-ld:hasJSON", + "@type": "@json" + }, + "jsons": { + "@id": "ngsi-ld:jsons", + "@container": "@list" + }, + "key": "ngsi-ld:hasKey", + "lang": "ngsi-ld:lang", + "languageMap": { + "@id": "ngsi-ld:hasLanguageMap", + "@container": "@language" + }, + "languageMaps": { + "@id": "ngsi-ld:hasLanguageMaps", + "@container": "@list" + }, + "lastFailure": { + "@id": "ngsi-ld:lastFailure", + "@type": "DateTime" + }, + "lastNotification": { + "@id": "ngsi-ld:lastNotification", + "@type": "DateTime" + }, + "lastSuccess": { + "@id": "ngsi-ld:lastSuccess", + "@type": "DateTime" + }, + "localOnly": "ngsi-ld:localOnly", + "location": "ngsi-ld:location", + "management": "ngsi-ld:management", + "managementInterval": "ngsi-ld:managementInterval", + "max": { + "@id": "ngsi-ld:max", + "@container": "@list" + }, + "min": { + "@id": "ngsi-ld:min", + "@container": "@list" + }, + "mode": "ngsi-ld:mode", + "modifiedAt": { + "@id": "ngsi-ld:modifiedAt", + "@type": "DateTime" + }, + "notification": "ngsi-ld:notification", + "notificationTrigger": "ngsi-ld:notificationTrigger", + "notifiedAt": { + "@id": 
"ngsi-ld:notifiedAt", + "@type": "DateTime" + }, + "notifierInfo": "ngsi-ld:notifierInfo", + "notUpdated": "ngsi-ld:notUpdated", + "object": { + "@id": "ngsi-ld:hasObject", + "@type": "@id" + }, + "objectList": { + "@id": "ngsi-ld:hasObjectList", + "@container": "@list" + }, + "objects": { + "@id": "ngsi-ld:hasObjects", + "@container": "@list" + }, + "objectsLists": { + "@id": "ngsi-ld:hasObjectsLists", + "@container": "@list" + }, + "objectType": { + "@id": "ngsi-ld:hasObjectType", + "@type": "@vocab" + }, + "observationInterval": "ngsi-ld:observationInterval", + "observationSpace": "ngsi-ld:observationSpace", + "observedAt": { + "@id": "ngsi-ld:observedAt", + "@type": "DateTime" + }, + "operationSpace": "ngsi-ld:operationSpace", + "operations": "ngsi-ld:operations", + "previousJson": { + "@id": "ngsi-ld:hasPreviousJson", + "@type": "@json" + }, + "previousLanguageMap": { + "@id": "ngsi-ld:hasPreviousLanguageMap", + "@container": "@language" + }, + "previousObject": { + "@id": "ngsi-ld:hasPreviousObject", + "@type": "@id" + }, + "previousObjectList": { + "@id": "ngsi-ld:hasPreviousObjectList", + "@container": "@list" + }, + "previousValue": "ngsi-ld:hasPreviousValue", + "previousValueList": { + "@id": "ngsi-ld:hasPreviousValueList", + "@container": "@list" + }, + "previousVocab": { + "@id": "ngsi-ld:hasPreviousVocab", + "@type": "@vocab" + }, + "properties": "geojson:properties", + "propertyNames": { + "@id": "ngsi-ld:propertyNames", + "@type": "@vocab" + }, + "q": "ngsi-ld:q", + "reason": "ngsi-ld:reason", + "receiverInfo": "ngsi-ld:receiverInfo", + "refreshRate": "ngsi-ld:refreshRate", + "registrationId": "ngsi-ld:registrationId", + "registrationName": "ngsi-ld:registrationName", + "relationshipNames": { + "@id": "ngsi-ld:relationshipNames", + "@type": "@vocab" + }, + "scope": "ngsi-ld:scope", + "scopeQ": "ngsi-ld:scopeQ", + "showChanges": "ngsi-ld:showChanges", + "startAt": { + "@id": "ngsi-ld:startAt", + "@type": "DateTime" + }, + "status": "ngsi-ld:status", + "stddev": { + "@id": "ngsi-ld:stddev", + "@container": "@list" + }, + "subscriptionId": { + "@id": "ngsi-ld:subscriptionId", + "@type": "@id" + }, + "subscriptionName": "ngsi-ld:subscriptionName", + "success": { + "@id": "ngsi-ld:success", + "@type": "@id" + }, + "sum": { + "@id": "ngsi-ld:sum", + "@container": "@list" + }, + "sumsq": { + "@id": "ngsi-ld:sumsq", + "@container": "@list" + }, + "sysAttrs": "ngsi-ld:sysAttrs", + "temporalQ": "ngsi-ld:temporalQ", + "tenant": { + "@id": "ngsi-ld:tenant", + "@type": "@id" + }, + "throttling": "ngsi-ld:throttling", + "timeAt": { + "@id": "ngsi-ld:timeAt", + "@type": "DateTime" + }, + "timeInterval": "ngsi-ld:timeInterval", + "timeout": "ngsi-ld:timeout", + "timeproperty": "ngsi-ld:timeproperty", + "timerel": "ngsi-ld:timerel", + "timesFailed": "ngsi-ld:timesFailed", + "timesSent": "ngsi-ld:timesSent", + "title": "http://purl.org/dc/terms/title", + "totalCount": { + "@id": "ngsi-ld:totalCount", + "@container": "@list" + }, + "triggerReason": "ngsi-ld:triggerReason", + "typeList": { + "@id": "ngsi-ld:typeList", + "@type": "@vocab" + }, + "typeName": { + "@id": "ngsi-ld:typeName", + "@type": "@vocab" + }, + "typeNames": { + "@id": "ngsi-ld:typeNames", + "@type": "@vocab" + }, + "unchanged": "ngsi-ld:unchanged", + "unitCode": "ngsi-ld:unitCode", + "updated": "ngsi-ld:updated", + "uri": "ngsi-ld:uri", + "value": "ngsi-ld:hasValue", + "valueList": { + "@id": "ngsi-ld:hasValueList", + "@container": "@list" + }, + "valueLists": { + "@id": "ngsi-ld:hasValueLists", + "@container": "@list" + }, + 
"values": { + "@id": "ngsi-ld:hasValues", + "@container": "@list" + }, + "vocab": { + "@id": "ngsi-ld:hasVocab", + "@type": "@vocab" + }, + "vocabs": { + "@id": "ngsi-ld:hasVocabs", + "@container": "@list" + }, + "watchedAttributes": { + "@id": "ngsi-ld:watchedAttributes", + "@type": "@vocab" + }, + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + } + } +'::jsonb, 'Hosted'); \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240530.1__entitymapupdate.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240530.1__entitymapupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..19e8cf97e5ecba2781bc4d559f05787b4fd3e9a3 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240530.1__entitymapupdate.sql @@ -0,0 +1,663 @@ + +DROP TABLE IF EXISTS public.entitymap; +DROP TABLE IF EXISTS public.entitymap_management; +DROP FUNCTION IF EXISTS ngsild_appendbatch(jsonb); +DROP FUNCTION IF EXISTS ngsild_upsertbatch(jsonb); + +CREATE OR REPLACE FUNCTION public.ngsild_deletebatch(IN entity_ids jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + prev_entity jsonb; + entityId text; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entityId IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(ENTITY_IDS) LOOP + BEGIN + DELETE FROM ENTITY WHERE ID = entityId RETURNING ENTITY.ENTITY INTO prev_entity; + if NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, 'Not Found')); + else + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', prev_entity)); + End IF; + EXCEPTION WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(entityId, SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_createbatch(IN entities jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + entity jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR entity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + BEGIN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (entity->>'@id', ARRAY(SELECT jsonb_array_elements_text(entity->'@type')), entity); + RAISE NOTICE 'result obj before %', resultObj; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || (entity->'@id')::jsonb); + RAISE NOTICE 'result obj after %', resultObj; + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_appendbatch(IN entities jsonb,IN nooverwrite boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + not_overwriting boolean; + to_update jsonb; + to_append jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + + BEGIN + SELECT ENTITY FROM ENTITY WHERE 
ID = newentity->>'@id' INTO prev_entity; + + SELECT + jsonb_object_agg(key, Array[(value->0) || jsonb_build_object('https://uri.etsi.org/ngsi-ld/createdAt', prev_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt')])::jsonb + || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + FROM jsonb_each((newentity - '@id' - '@type')) + WHERE key IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_update; + + IF NOOVERWRITE THEN + SELECT jsonb_object_agg(key, value)::jsonb + FROM jsonb_each(newentity) + WHERE key NOT IN (SELECT jsonb_object_keys(prev_entity)) + INTO to_append; + + IF to_append IS NOT NULL THEN + UPDATE ENTITY + SET ENTITY = ENTITY || to_append || jsonb_build_object('https://uri.etsi.org/ngsi-ld/modifiedAt', newentity->'https://uri.etsi.org/ngsi-ld/modifiedAt') + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + not_overwriting := true; + END IF; + ELSIF newentity ? '@type' THEN + UPDATE ENTITY + SET E_TYPES = ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), + ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + ELSE + UPDATE ENTITY + SET ENTITY = ENTITY.ENTITY || to_update || CASE WHEN to_append IS NOT NULL THEN to_append ELSE '{}' END + WHERE ID = newentity->>'@id' + RETURNING ENTITY.ENTITY INTO updated_entity; + END IF; + + IF not_overwriting THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Overwriting')); + ELSIF NOT FOUND THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + ELSE + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', newentity->'@id', 'old', prev_entity, 'new', updated_entity)::jsonb); + END IF; + + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%', SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + + RETURN resultObj; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.ngsild_upsertbatch(IN entities jsonb,IN do_replace boolean) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +declare + resultObj jsonb; + resultEntry jsonb; + newentity jsonb; + prev_entity jsonb; + updated_entity jsonb; + updated boolean; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + FOR newentity IN SELECT jsonb_array_elements FROM jsonb_array_elements(ENTITIES) LOOP + prev_entity := NULL; + updated := FALSE; + BEGIN + IF newentity ? 
'@type' THEN + INSERT INTO ENTITY(ID,E_TYPES, ENTITY) VALUES (newentity->>'@id', ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')), newentity) RETURNING ENTITY.ENTITY INTO updated_entity; + ELSEIF DO_REPLACE THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = newentity ||jsonb_build_object('@type',(ENTITY->'@type')) WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + ELSE + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + UPDATE ENTITY SET ENTITY = ENTITY.ENTITY || newentity WHERE id = newentity->>'@id' RETURNING ENTITY.ENTITY INTO updated_entity; + updated := TRUE; + END IF; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + EXCEPTION WHEN unique_violation THEN + SELECT ENTITY FROM ENTITY WHERE ID=newentity->>'@id' INTO prev_entity; + IF DO_REPLACE THEN + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = newentity WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + ELSE + UPDATE ENTITY SET E_TYPES = ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type')))), ENTITY = ENTITY.entity || newentity || jsonb_set(newentity, '{@type}', array_to_json(ARRAY(SELECT DISTINCT UNNEST(e_types || ARRAY(SELECT jsonb_array_elements_text(newentity->'@type'))))) ::jsonb) + WHERE ID=newentity->>'@id' RETURNING ENTITY.entity INTO updated_entity; + END IF; + + updated := TRUE; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', (newentity->>'@id'), 'updated', updated, 'old', prev_entity, 'new', updated_entity)); + WHEN OTHERS THEN + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', SQLSTATE)); + END; + END LOOP; + RETURN resultObj; +END; +$BODY$; + +CREATE TABLE public.entitymap +( + id text, + expires_at timestamp without time zone, + last_access timestamp without time zone, + entity_map jsonb, + followup_select text, + PRIMARY KEY (id) +); + +CREATE OR REPLACE FUNCTION public.getmode(IN modetext text) + RETURNS smallint + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + registry_mode smallint; +BEGIN + IF (modeText = 'auxiliary') THEN + registry_mode = 0; + ELSIF (modeText = 'inclusive') THEN + registry_mode = 1; + ELSIF (modeText = 'redirect') THEN + registry_mode = 2; + ELSIF (modeText = 'exclusive') THEN + registry_mode = 3; + ELSE + registry_mode = 1; + END IF; + RETURN registry_mode; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.updateMapIfNeeded(IN ids text[], ientityMap jsonb, entityMapToken text) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entityMapEntry jsonb; + +BEGIN + if array_length(ids, 1) = 0 or ids is null then + return ientityMap; + else + entityMapEntry := ientityMap -> 'entityMap'; + SELECT jsonb_agg(entry) INTO entityMapEntry FROM jsonb_array_elements(entityMapEntry) as entry, jsonb_object_keys(entry) as id WHERE NOT(id = ANY(ids)); + ientityMap := jsonb_set(ientityMap, '{entityMap}', entityMapEntry); + UPDATE ENTITYMAP SET LAST_ACCESS = NOW(), entity_map = ientityMap WHERE id=entityMapToken; + return ientityMap; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.getEntityMapAndEntities(IN 
entityMapToken text, ids text[], ilimit int, ioffset int) + RETURNS TABLE(id text, entity jsonb, parent boolean, e_types text[], entity_map jsonb) + LANGUAGE 'plpgsql' + VOLATILE + COST 100 + +AS $BODY$ +DECLARE + entitymap jsonb; + regempty boolean; + noRootLevelRegEntry boolean; + queryText text; +BEGIN + if ids is null or array_length(ids, 1) = 0 then + UPDATE ENTITYMAP SET LAST_ACCESS = NOW() WHERE ENTITYMAP.id=entityMapToken RETURNING ENTITYMAP.ENTITY_MAP INTO entitymap; + if entitymap is null then + RAISE EXCEPTION 'Nonexistent ID --> %', entityMapToken USING ERRCODE = 'S0001'; + end if; + regempty := entitymap -> 'regEmptyOrNoRegEntryAndNoLinkedQuery'; + noRootLevelRegEntry := entitymap -> 'noRootLevelRegEntryAndLinkedQuery'; + + if regempty or noRootLevelRegEntry then + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY > $2), ' + || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ' limit $3), X as (SELECT D0.ID as id, max(D0.ordinality) as maxOrdinality FROM D0 GROUP BY D0.ID), C as (SELECT updateMapIfNeeded(ids.aggIds, $4, $5) as entity_map FROM (SELECT ARRAY_AGG(a.id) as aggIds FROM a LEFT JOIN X ON a.id = X.ID WHERE X.ID IS NULL AND a.ordinality <= X.maxOrdinality) as ids)' + || (entitymap ->> 'finalselect')) using (entitymap->'entityMap'), ioffset, ilimit, entitymap, entityMapToken; + else + return query execute ('WITH a as (SELECT entityIdEntry.key as id, val.ordinality as ordinality FROM JSONB_ARRAY_ELEMENTS($1) WITH ORDINALITY as val, jsonb_each(val.value) as entityIdEntry where val.ORDINALITY between $2 and ($2 + $3) and entityIdEntry.value ? ''@none''), C as (SELECT $4 as entity_map), ' || (entitymap ->> 'selectPart') || (entitymap ->> 'wherePart') || ')' ||(entitymap ->> 'finalselect')) using entitymap->'entityMap', ioffset, ilimit, entitymap; + end if; + else + if regempty or noRootLevelRegEntry then + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + else + return query execute ((entitymap ->> 'selectPart') || ' id=any($1) AND ' || (entitymap ->> 'wherePart') || ')' || (entitymap ->> 'finalselect')) using ids; + end if; + end if; +END; +$BODY$; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS entitymap; + +ALTER TABLE IF EXISTS public.csourceinformation DROP COLUMN IF EXISTS cancompress; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN queryEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN createEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN updateEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN deleteEntityMap boolean; + +ALTER TABLE IF EXISTS public.csourceinformation + ADD COLUMN retrieveEntityMap boolean; + +UPDATE public.csourceinformation SET queryEntityMap = false,createEntityMap = false, updateEntityMap = false, deleteEntityMap = false,retrieveEntityMap = false; + +CREATE OR REPLACE FUNCTION public.getoperations(IN operationjson jsonb) + RETURNS boolean[] + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +declare + operations boolean[]; + operationEntry jsonb; +BEGIN + operations = 
array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false]::boolean[]; + FOR operationEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(OPERATIONJSON) LOOP + CASE operationEntry#>>'{@value}' + WHEN 'federationOps' THEN + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + operations[32] = true; + operations[33] = true; + operations[34] = true; + operations[35] = true; + operations[36] = true; + operations[37] = true; + operations[38] = true; + operations[39] = true; + operations[40] = true; + operations[41] = true; + WHEN 'updateOps' THEN + operations[2] = true; + operations[4] = true; + operations[18] = true; + operations[19] = true; + WHEN 'retrieveOps' THEN + operations[21] = true; + operations[22] = true; + WHEN 'redirectionOps' THEN + operations[1] = true; + operations[2] = true; + operations[3] = true; + operations[4] = true; + operations[5] = true; + operations[6] = true; + operations[17] = true; + operations[18] = true; + operations[21] = true; + operations[22] = true; + operations[26] = true; + operations[27] = true; + operations[28] = true; + operations[29] = true; + operations[30] = true; + operations[31] = true; + WHEN 'createEntity' THEN + operations[1] = true; + WHEN 'updateEntity' THEN + operations[2] = true; + WHEN 'appendAttrs' THEN + operations[3] = true; + WHEN 'updateAttrs' THEN + operations[4] = true; + WHEN 'deleteAttrs' THEN + operations[5] = true; + WHEN 'deleteEntity' THEN + operations[6] = true; + WHEN 'createBatch' THEN + operations[7] = true; + WHEN 'upsertBatch' THEN + operations[8] = true; + WHEN 'updateBatch' THEN + operations[9] = true; + WHEN 'deleteBatch' THEN + operations[10] = true; + WHEN 'upsertTemporal' THEN + operations[11] = true; + WHEN 'appendAttrsTemporal' THEN + operations[12] = true; + WHEN 'deleteAttrsTemporal' THEN + operations[13] = true; + WHEN 'updateAttrsTemporal' THEN + operations[14] = true; + WHEN 'deleteAttrInstanceTemporal' THEN + operations[15] = true; + WHEN 'deleteTemporal' THEN + operations[16] = true; + WHEN 'mergeEntity' THEN + operations[17] = true; + WHEN 'replaceEntity' THEN + operations[18] = true; + WHEN 'replaceAttrs' THEN + operations[19] = true; + WHEN 'mergeBatch' THEN + operations[20] = true; + WHEN 'retrieveEntity' THEN + operations[21] = true; + WHEN 'queryEntity' THEN + operations[22] = true; + WHEN 'queryBatch' THEN + operations[23] = true; + WHEN 'retrieveTemporal' THEN + operations[24] = true; + WHEN 'queryTemporal' THEN + operations[25] = true; + WHEN 'retrieveEntityTypes' THEN + operations[26] = true; + WHEN 'retrieveEntityTypeDetails' THEN + operations[27] = true; + WHEN 'retrieveEntityTypeInfo' THEN + operations[28] = true; + WHEN 'retrieveAttrTypes' THEN + operations[29] = true; + WHEN 'retrieveAttrTypeDetails' THEN + operations[30] = true; + WHEN 'retrieveAttrTypeInfo' THEN + operations[31] = true; + WHEN 'createSubscription' THEN + operations[32] = true; + WHEN 'updateSubscription' THEN + operations[33] = true; + WHEN 'retrieveSubscription' THEN + operations[34] = true; + WHEN 'querySubscription' THEN + operations[35] = true; + WHEN 'deleteSubscription' THEN + operations[36] = true; + WHEN 'queryEntityMap' THEN + operations[37] = true; + WHEN 
'createEntityMap' THEN + operations[38] = true; + WHEN 'updateEntityMap' THEN + operations[39] = true; + WHEN 'deleteEntityMap' THEN + operations[40] = true; + WHEN 'retrieveEntityMap' THEN + operations[41] = true; + END CASE; + END LOOP; + RETURN operations; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.csourceinformation_extract_jsonb_fields() + RETURNS trigger + LANGUAGE 'plpgsql' + VOLATILE + COST 100 +AS $BODY$ +DECLARE + infoEntry jsonb; + entitiesEntry jsonb; + entityType text; + entityId text; + entityIdPattern text; + attribsAdded boolean; + location GEOMETRY(Geometry, 4326); + scopes text[]; + tenant text; + regMode smallint; + operations boolean[]; + endpoint text; + expires timestamp without time zone; + headers jsonb; + internalId bigint; + attribName text; + errorFound boolean; +BEGIN + IF (TG_OP = 'INSERT' AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NULL AND NEW.REG IS NOT NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NULL) OR + (TG_OP = 'UPDATE' AND OLD.REG IS NOT NULL AND NEW.REG IS NOT NULL AND OLD.REG <> NEW.REG) THEN + errorFound := false; + internalId = NEW.id; + endpoint = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/endpoint,0,@value}'; + IF NEW.REG ? 'https://uri.etsi.org/ngsi-ld/location' THEN + IF (NEW.REG@>'{"https://uri.etsi.org/ngsi-ld/location": [ {"@type": [ "https://uri.etsi.org/ngsi-ld/GeoProperty" ] } ] }') THEN + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson( NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0,https://uri.etsi.org/ngsi-ld/hasValue,0}')::text ), 4326); + ELSE + location = ST_SetSRID(ST_GeomFromGeoJSON( getGeoJson(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/location,0}')::text ), 4326); + END IF; + ELSE + location = null; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/scope}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/scope') THEN + scopes = getScopes(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/scope}'); + ELSE + scopes = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/tenant,0,@value}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/tenant') THEN + tenant = NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/tenant,0,@value}'; + ELSE + tenant = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/operations}'); + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/operations') THEN + operations = getOperations(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/operations}'); + ELSE + operations = array[false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,false,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true,true]::boolean[]; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/contextSourceInfo}'; + ELSIF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo') THEN + headers = NEW.REG#>'{https://uri.etsi.org/ngsi-ld/default-context/contextSourceInfo}'; + ELSE + headers = NULL; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/mode,0,@value}'); + ELSIF (NEW.REG ? 
'https://uri.etsi.org/ngsi-ld/default-context/mode') THEN + regMode = getMode(NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/default-context/mode,0,@value}'); + ELSE + regMode = 1; + END IF; + + IF (NEW.REG ? 'https://uri.etsi.org/ngsi-ld/expires') THEN + expires = (NEW.REG#>>'{https://uri.etsi.org/ngsi-ld/expires,0,@value}')::TIMESTAMP; + ELSE + expires = NULL; + END IF; + BEGIN + IF TG_OP = 'UPDATE' THEN + DELETE FROM csourceinformation where cs_id = NEW.id; + END IF; + FOR infoEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(NEW.REG#>'{https://uri.etsi.org/ngsi-ld/information}') LOOP + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/entities' THEN + FOR entitiesEntry IN SELECT jsonb_array_elements FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/entities}') LOOP + FOR entityType IN SELECT jsonb_array_elements_text FROM jsonb_array_elements_text(entitiesEntry#>'{@type}') LOOP + entityId := NULL; + entityIdPattern := NULL; + attribsAdded := false; + IF entitiesEntry ? '@id' THEN + entityId = entitiesEntry#>>'{@id}'; + END IF; + IF entitiesEntry ? 'https://uri.etsi.org/ngsi-ld/idPattern' THEN + entityIdPattern = entitiesEntry#>>'{https://uri.etsi.org/ngsi-ld/idPattern,0,@value}'; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + attribsAdded = true; + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and id % conflicts with existing entry', attribName, entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types) AND ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and idpattern % conflicts with existing entry', attribName, entityIdPattern USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types) AND ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF errorFound THEN + RAISE EXCEPTION 'Registration with attrib % and type % conflicts with existing entry', attribName, entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation (cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + + END LOOP; + END IF; + IF NOT attribsAdded THEN + IF regMode > 1 THEN + IF entityId IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id = entityId AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with entityId % conflicts with existing entity', entityId USING ERRCODE='23514'; + END IF; + ELSIF entityIdPattern IS NOT NULL THEN + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE id ~ entityIdPattern AND entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with idPattern % and type % conflicts with existing entity', entityIdPattern, entityType USING ERRCODE='23514'; + END IF; + ELSE + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE entityType = ANY(e_types); + IF errorFound THEN + RAISE EXCEPTION 'Registration with type % conflicts with existing entity', entityType USING ERRCODE='23514'; + END IF; + END IF; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, 
retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) values (internalId, NEW.C_ID, entityId, entityIdPattern, entityType, NULL, NULL, location, scopes, expires, endpoint, tenant, headers, regMode,operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END IF; + END LOOP; + END LOOP; + ELSE + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/propertyNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/propertyNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' != 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, NULL, attribName, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + IF infoEntry ? 'https://uri.etsi.org/ngsi-ld/relationshipNames' THEN + FOR attribName IN SELECT value#>>'{@id}' FROM jsonb_array_elements(infoEntry#>'{https://uri.etsi.org/ngsi-ld/relationshipNames}') LOOP + SELECT count(id)>0 INTO errorFound FROM ENTITY WHERE ENTITY ? 
attribName AND EXISTS (SELECT FROM jsonb_array_elements(ENTITY->attribName) as attribBody WHERE attribBody#>>'{@type,0}' = 'https://uri.etsi.org/ngsi-ld/Relationship'); + IF regMode > 1 AND errorFound THEN + RAISE EXCEPTION 'Attribute % conflicts with existing entity', attribName USING ERRCODE='23514'; + END IF; + INSERT INTO csourceinformation(cs_id, c_id, e_id, e_id_p, e_type, e_rel, e_prop, i_location, scopes, expires, endpoint, tenant_id, headers, reg_mode, createEntity, updateEntity, appendAttrs, updateAttrs, deleteAttrs, deleteEntity, createBatch, upsertBatch, updateBatch, deleteBatch, upsertTemporal, appendAttrsTemporal, deleteAttrsTemporal, updateAttrsTemporal, deleteAttrInstanceTemporal, deleteTemporal, mergeEntity, replaceEntity, replaceAttrs, mergeBatch, retrieveEntity, queryEntity, queryBatch, retrieveTemporal, queryTemporal, retrieveEntityTypes, retrieveEntityTypeDetails, retrieveEntityTypeInfo, retrieveAttrTypes, retrieveAttrTypeDetails, retrieveAttrTypeInfo, createSubscription, updateSubscription, retrieveSubscription, querySubscription, deleteSubscription, queryEntityMap, createEntityMap, updateEntityMap, deleteEntityMap, retrieveEntityMap) VALUES (internalId, NEW.C_ID, NULL, NULL, NULL, attribName, NULL, location, scopes, expires, endpoint, tenant, headers, regMode, operations[1],operations[2],operations[3],operations[4],operations[5],operations[6],operations[7],operations[8],operations[9],operations[10],operations[11],operations[12],operations[13],operations[14],operations[15],operations[16],operations[17],operations[18],operations[19],operations[20],operations[21],operations[22],operations[23],operations[24],operations[25],operations[26],operations[27],operations[28],operations[29],operations[30],operations[31],operations[32],operations[33],operations[34],operations[35],operations[36],operations[37],operations[38],operations[39],operations[40],operations[41]); + END LOOP; + END IF; + END IF; + END LOOP; + END; + END IF; + RETURN NEW; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240730.1__mergejsonupdate.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240730.1__mergejsonupdate.sql new file mode 100644 index 0000000000000000000000000000000000000000..474a2ef4780544dc6697fefec62900f6c79bc1ed --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240730.1__mergejsonupdate.sql @@ -0,0 +1,834 @@ +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB[]; + newentity jsonb; + resultObj jsonb; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + IF EXISTS (SELECT entity FROM entity WHERE id = newentity->'@id'->>0) THEN + merged_json := (SELECT entity FROM entity WHERE id = newentity->'@id'->>0); + previous_entity := merged_json; + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id',newentity->>'@id', 'old', previous_entity)); + ELSE + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object(newentity->>'@id', 'Not Found')); + CONTINUE; + END IF; + + -- Iterate through keys in JSONB value + FOR key, value IN SELECT * FROM JSONB_EACH(newentity) LOOP + IF value::TEXT LIKE 
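+		-- NGSI-LD merge-patch semantics: an attribute instance whose hasValue/hasObject is the
+		-- URN 'urn:ngsi-ld:null' is a deletion marker, so the key is dropped instead of merged;
+		-- otherwise the new instance replaces the old one while keeping the original createdAt.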
'%"https://uri.etsi.org/ngsi-ld/hasValue": [{"@value": "urn:ngsi-ld:null"}]%'::TEXT + OR value::TEXT LIKE '%"https://uri.etsi.org/ngsi-ld/hasObject": [{"@id": "urn:ngsi-ld:null"}]%'::TEXT THEN + -- Delete the key + merged_json := merged_json - key; + ELSIF merged_json ? key THEN + -- Update the value + value2 := (value->0)::jsonb; + IF jsonb_typeof(value2) = 'object' THEN + value2 := value2 - 'https://uri.etsi.org/ngsi-ld/createdAt'; + END IF; + merged_json := jsonb_set(merged_json, ARRAY[key], jsonb_build_array(value2), true); + IF previous_entity->key->0 ? 'https://uri.etsi.org/ngsi-ld/createdAt' THEN + merged_json := jsonb_set(merged_json, ARRAY[key,'0','https://uri.etsi.org/ngsi-ld/createdAt'], (previous_entity->key->0->'https://uri.etsi.org/ngsi-ld/createdAt'), true); + END IF; + ELSE + -- Add the key-value pair + merged_json := jsonb_set(merged_json, ARRAY[key], value, true); + END IF; + END LOOP; + + -- Perform cleanup operations on merged_json + merged_json := jsonb_strip_nulls(replace(merged_json::text,'"urn:ngsi-ld:null"','null')::jsonb); + merged_json := regexp_replace(merged_json::text, '{"@language": "[^"]*"}', 'null', 'g')::jsonb; + + WHILE merged_json::text LIKE '%[]%' OR merged_json::text LIKE '%{}%' OR merged_json::text LIKE '%null%' LOOP + merged_json := jsonb_strip_nulls(replace(merged_json::text,'null,','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,', null','')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[null]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'[]','null')::jsonb); + merged_json := jsonb_strip_nulls(replace(merged_json::text,'{}','null')::jsonb); + END LOOP; + + -- Update entity table with merged JSON and extract @type values into an array + UPDATE entity SET entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) WHERE id = newentity->'@id'->>0; + END LOOP; + + -- Return the result object + RETURN resultObj; +END; +$BODY$; + +UPDATE contexts SET body = '{ + + "@context": { + + "@version": 1.1, + + "@protected": true, + + "ngsi-ld": "https://uri.etsi.org/ngsi-ld/", + + "geojson": "https://purl.org/geojson/vocab#", + + "id": "@id", + + "type": "@type", + + "Attribute": "ngsi-ld:Attribute", + + "AttributeList": "ngsi-ld:AttributeList", + + "ContextSourceIdentity": "ngsi-ld:ContextSourceIdentity", + + "ContextSourceNotification": "ngsi-ld:ContextSourceNotification", + + "ContextSourceRegistration": "ngsi-ld:ContextSourceRegistration", + + "Date": "ngsi-ld:Date", + + "DateTime": "ngsi-ld:DateTime", + + "EntityType": "ngsi-ld:EntityType", + + "EntityTypeInfo": "ngsi-ld:EntityTypeInfo", + + "EntityTypeList": "ngsi-ld:EntityTypeList", + + "Feature": "geojson:Feature", + + "FeatureCollection": "geojson:FeatureCollection", + + "GeoProperty": "ngsi-ld:GeoProperty", + + "GeometryCollection": "geojson:GeometryCollection", + + "JsonProperty": "ngsi-ld:JsonProperty", + + "LanguageProperty": "ngsi-ld:LanguageProperty", + + "LineString": "geojson:LineString", + + "ListProperty": "ngsi-ld:ListProperty", + + "ListRelationship": "ngsi-ld:ListRelationship", + + "MultiLineString": "geojson:MultiLineString", + + "MultiPoint": "geojson:MultiPoint", + + "MultiPolygon": "geojson:MultiPolygon", + + "Notification": "ngsi-ld:Notification", + + "Point": "geojson:Point", + + "Polygon": "geojson:Polygon", + + "Property": "ngsi-ld:Property", + + "Relationship": "ngsi-ld:Relationship", + + "Subscription": "ngsi-ld:Subscription", + + "TemporalProperty": 
"ngsi-ld:TemporalProperty", + + "Time": "ngsi-ld:Time", + + "VocabProperty": "ngsi-ld:VocabProperty", + + "accept": "ngsi-ld:accept", + + "attributeCount": "attributeCount", + + "attributeDetails": "attributeDetails", + + "attributeList": { + + "@id": "ngsi-ld:attributeList", + + "@type": "@vocab" + + }, + + "attributeName": { + + "@id": "ngsi-ld:attributeName", + + "@type": "@vocab" + + }, + + "attributeNames": { + + "@id": "ngsi-ld:attributeNames", + + "@type": "@vocab" + + }, + + "attributeTypes": { + + "@id": "ngsi-ld:attributeTypes", + + "@type": "@vocab" + + }, + + "attributes": { + + "@id": "ngsi-ld:attributes", + + "@type": "@vocab" + + }, + + "attrs": "ngsi-ld:attrs", + + "avg": { + + "@id": "ngsi-ld:avg", + + "@container": "@list" + + }, + + "bbox": { + + "@container": "@list", + + "@id": "geojson:bbox" + + }, + + "cacheDuration": "ngsi-ld:cacheDuration", + + "containedBy": "ngsi-ld:isContainedBy", + + "contextSourceAlias": "ngsi-ld:contextSourceAlias", + + "contextSourceExtras": { + + "@id": "ngsi-ld:contextSourceExtras", + + "@type": "@json" + + }, + + "contextSourceInfo": "ngsi-ld:contextSourceInfo", + + "contextSourceTimeAt": { + + "@id": "ngsi-ld:contextSourceTimeAt", + + "@type": "DateTime" + + }, + + "contextSourceUptime": "ngsi-ld:contextSourceUptime", + + "cooldown": "ngsi-ld:cooldown", + + "coordinates": { + + "@container": "@list", + + "@id": "geojson:coordinates" + + }, + + "createdAt": { + + "@id": "ngsi-ld:createdAt", + + "@type": "DateTime" + + }, + + "csf": "ngsi-ld:csf", + + "data": "ngsi-ld:data", + + "dataset": { + + "@id": "ngsi-ld:hasDataset", + + "@container": "@index" + + }, + + "datasetId": { + + "@id": "ngsi-ld:datasetId", + + "@type": "@id" + + }, + + "deletedAt": { + + "@id": "ngsi-ld:deletedAt", + + "@type": "DateTime" + + }, + + "description": "http://purl.org/dc/terms/description", + + "detail": "ngsi-ld:detail", + + "distinctCount": { + + "@id": "ngsi-ld:distinctCount", + + "@container": "@list" + + }, + + "endAt": { + + "@id": "ngsi-ld:endAt", + + "@type": "DateTime" + + }, + + "endTimeAt": { + + "@id": "ngsi-ld:endTimeAt", + + "@type": "DateTime" + + }, + + "endpoint": "ngsi-ld:endpoint", + + "entities": "ngsi-ld:entities", + + "pick": "ngsi-ld:pick", + + "omit": "ngsi-ld:omit", + + "jsonKeys": "ngsi-ld:jsonKeys", + + "entity": "ngsi-ld:entity", + + "entityCount": "ngsi-ld:entityCount", + + "entityId": { + + "@id": "ngsi-ld:entityId", + + "@type": "@id" + + }, + + "entityList": { + + "@id": "ngsi-ld:entityList", + + "@container": "@list" + + }, + + "entityMap": "ngsi-ld:hasEntityMap", + + "error": "ngsi-ld:error", + + "errors": "ngsi-ld:errors", + + "expiresAt": { + + "@id": "ngsi-ld:expiresAt", + + "@type": "DateTime" + + }, + + "features": { + + "@container": "@set", + + "@id": "geojson:features" + + }, + + "format": "ngsi-ld:format", + + "geoQ": "ngsi-ld:geoQ", + + "geometry": "geojson:geometry", + + "geoproperty": "ngsi-ld:geoproperty", + + "georel": "ngsi-ld:georel", + + "idPattern": "ngsi-ld:idPattern", + + "information": "ngsi-ld:information", + + "instanceId": { + + "@id": "ngsi-ld:instanceId", + + "@type": "@id" + + }, + + "isActive": "ngsi-ld:isActive", + + "join": "ngsi-ld:join", + + "joinLevel": "ngsi-ld:hasJoinLevel", + + "json": { + + "@id": "ngsi-ld:hasJSON", "@type": "@json" + + }, + + "jsons": { + + "@id": "ngsi-ld:jsons", + + "@container": "@list" + + }, + + "key": "ngsi-ld:hasKey", + + "lang": "ngsi-ld:lang", + + "languageMap": { + + "@id": "ngsi-ld:hasLanguageMap", + + "@container": "@language" + + }, + + "languageMaps": { + + 
"@id": "ngsi-ld:hasLanguageMaps", + + "@container": "@list" + + }, + + "lastFailure": { + + "@id": "ngsi-ld:lastFailure", + + "@type": "DateTime" + + }, + + "lastNotification": { + + "@id": "ngsi-ld:lastNotification", + + "@type": "DateTime" + + }, + + "lastSuccess": { + + "@id": "ngsi-ld:lastSuccess", + + "@type": "DateTime" + + }, + + "linkedMaps": "ngsi-ld:linkedMaps", + + "localOnly": "ngsi-ld:localOnly", + + "location": "ngsi-ld:location", + + "management": "ngsi-ld:management", + + "managementInterval": "ngsi-ld:managementInterval", + + "max": { + + "@id": "ngsi-ld:max", + + "@container": "@list" + + }, + + "min": { + + "@id": "ngsi-ld:min", + + "@container": "@list" + + }, + + "mode": "ngsi-ld:mode", + + "modifiedAt": { + + "@id": "ngsi-ld:modifiedAt", + + "@type": "DateTime" + + }, + + "notification": "ngsi-ld:notification", + + "notificationTrigger": "ngsi-ld:notificationTrigger", + + "notifiedAt": { + + "@id": "ngsi-ld:notifiedAt", + + "@type": "DateTime" + + }, + + "notifierInfo": "ngsi-ld:notifierInfo", + + "notUpdated": "ngsi-ld:notUpdated", + + "object": { + + "@id": "ngsi-ld:hasObject", + + "@type": "@id" + + }, + + "objectList": { + + "@id": "ngsi-ld:hasObjectList", + + "@container": "@list" + + }, + + "objects": { + + "@id": "ngsi-ld:hasObjects", + + "@container": "@list" + + }, + + "objectsLists": { + + "@id": "ngsi-ld:hasObjectsLists", + + "@container": "@list" + + }, + + "objectType": { + + "@id": "ngsi-ld:hasObjectType", + + "@type": "@vocab" + + }, + + "observationInterval": "ngsi-ld:observationInterval", + + "observationSpace": "ngsi-ld:observationSpace", + + "observedAt": { + + "@id": "ngsi-ld:observedAt", + + "@type": "DateTime" + + }, + + "operationSpace": "ngsi-ld:operationSpace", + + "operations": "ngsi-ld:operations", + + "previousJson": { + + "@id": "ngsi-ld:hasPreviousJson", + + "@type": "@json" + + }, + + "previousLanguageMap": { + + "@id": "ngsi-ld:hasPreviousLanguageMap", + + "@container": "@language" + + }, + + "previousObject": { + + "@id": "ngsi-ld:hasPreviousObject", + + "@type": "@id" + + }, + + "previousObjectList": { + + "@id": "ngsi-ld:hasPreviousObjectList", + + "@container": "@list" + + }, + + "previousValue": "ngsi-ld:hasPreviousValue", + + "previousValueList": { + + "@id": "ngsi-ld:hasPreviousValueList", + + "@container": "@list" + + }, + + "previousVocab": { + + "@id": "ngsi-ld:hasPreviousVocab", + + "@type": "@vocab" + + }, + + "properties": "geojson:properties", + + "propertyNames": { + + "@id": "ngsi-ld:propertyNames", + + "@type": "@vocab" + + }, + + "q": "ngsi-ld:q", + + "reason": "ngsi-ld:reason", + + "receiverInfo": "ngsi-ld:receiverInfo", + + "refreshRate": "ngsi-ld:refreshRate", + + "registrationId": "ngsi-ld:registrationId", + + "registrationName": "ngsi-ld:registrationName", + + "relationshipNames": { + + "@id": "ngsi-ld:relationshipNames", + + "@type": "@vocab" + + }, + + "scope": "ngsi-ld:scope", + + "scopeQ": "ngsi-ld:scopeQ", + + "showChanges": "ngsi-ld:showChanges", + + "startAt": { + + "@id": "ngsi-ld:startAt", + + "@type": "DateTime" + + }, + + "status": "ngsi-ld:status", + + "stddev": { + + "@id": "ngsi-ld:stddev", + + "@container": "@list" + + }, + + "subscriptionId": { + + "@id": "ngsi-ld:subscriptionId", + + "@type": "@id" + + }, + + "subscriptionName": "ngsi-ld:subscriptionName", + + "success": { + + "@id": "ngsi-ld:success", + + "@type": "@id" + + }, + + "sum": { + + "@id": "ngsi-ld:sum", + + "@container": "@list" + + }, + + "sumsq": { + + "@id": "ngsi-ld:sumsq", + + "@container": "@list" + + }, + + "sysAttrs": 
"ngsi-ld:sysAttrs", + + "temporalQ": "ngsi-ld:temporalQ", + + "tenant": { + + "@id": "ngsi-ld:tenant", + + "@type": "@id" + + }, + + "throttling": "ngsi-ld:throttling", + + "timeAt": { + + "@id": "ngsi-ld:timeAt", + + "@type": "DateTime" + + }, + + "timeInterval": "ngsi-ld:timeInterval", + + "timeout": "ngsi-ld:timeout", + + "timeproperty": "ngsi-ld:timeproperty", + + "timerel": "ngsi-ld:timerel", + + "timesFailed": "ngsi-ld:timesFailed", + + "timesSent": "ngsi-ld:timesSent", + + "title": "http://purl.org/dc/terms/title", + + "totalCount": { + + "@id": "ngsi-ld:totalCount", + + "@container": "@list" + + }, + + "triggerReason": "ngsi-ld:triggerReason", + + "typeList": { + + "@id": "ngsi-ld:typeList", + + "@type": "@vocab" + + }, + + "typeName": { + + "@id": "ngsi-ld:typeName", + + "@type": "@vocab" + + }, + + "typeNames": { + + "@id": "ngsi-ld:typeNames", + + "@type": "@vocab" + + }, + + "unchanged": "ngsi-ld:unchanged", + + "unitCode": "ngsi-ld:unitCode", + + "updated": "ngsi-ld:updated", + + "uri": "ngsi-ld:uri", + + "value": "ngsi-ld:hasValue", + + "valueList": { + + "@id": "ngsi-ld:hasValueList", + + "@container": "@list" + + }, + + "valueLists": { + + "@id": "ngsi-ld:hasValueLists", + + "@container": "@list" + + }, + + "values": { + + "@id": "ngsi-ld:hasValues", + + "@container": "@list" + + }, + + "vocab": { + + "@id": "ngsi-ld:hasVocab", + + "@type": "@vocab" + + }, + + "vocabs": { + + "@id": "ngsi-ld:hasVocabs", + + "@container": "@list" + + }, + + "watchedAttributes": { + + "@id": "ngsi-ld:watchedAttributes", + + "@type": "@vocab" + + }, + + "@vocab": "https://uri.etsi.org/ngsi-ld/default-context/" + + } + +} + +'::jsonb WHERE id=')$%^&'; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240801.1__mergepatchfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240801.1__mergepatchfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..90d4785b7e7d4b82c6ac1bf4c88ac56043f995bc --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240801.1__mergepatchfix.sql @@ -0,0 +1,963 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_point(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE +BEGIN + if not geo_json_entry ? '@list' or jsonb_array_length(geo_json_entry #> '{@list}') != 2 then + RAISE EXCEPTION 'Invalid geo point for geo json' USING ERRCODE = 'SB006'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.VALIDATE_GEO_JSON(IN GEO_JSON_ENTRY JSONB) RETURNS VOID LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + geo_type text; + value jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? 
'@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.clean_ngsi_ld_null(IN json_entry jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + json_type text; + result jsonb; + value jsonb; + cleaned jsonb; + key text; +BEGIN + json_type = jsonb_typeof(json_entry); + if json_type = 'array' then + result = '[]'::jsonb; + for value in select * from jsonb_array_elements(json_entry) loop + cleaned = clean_ngsi_ld_null(value); + if cleaned is not null then + result = result || cleaned; + end if; + end loop; + if jsonb_array_length(result) = 0 then + return null; + end if; + return result; + elsif json_type = 'object' then + result = '{}'; + for key, value in Select * from jsonb_each(json_entry) loop + if value::text != '"urn:ngsi-ld:null"' then + result = jsonb_set(result, '{key}', value); + end if; + end loop; + if result::text = '{}' then + return null; + end if; + return result; + else + if json_entry::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return json_entry; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_json(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_type text; + old_type text; + todelete jsonb; + deleted integer; + i integer; + index integer; + value jsonb; + value2 jsonb; + merged_json jsonb; + key text; +BEGIN + new_type = jsonb_typeof(new_attrib); + old_type = jsonb_typeof(old_attrib); + if old_attrib is null or new_type != old_type then + old_attrib := new_attrib; + end if; + todelete = '[]'::jsonb; + if new_type = 'array' then + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + for i in 0 .. 
jsonb_array_length(new_attrib) loop + if new_attrib ->> i = 'urn:ngsi-ld:null' then + todelete = todelete || i; + end if; + end loop; + deleted = 0; + if array_length(todelete) > 0 then + for i in select * from jsonb_array_elements(todelete) loop + new_attrib = new_attrib - (i - deleted); + deleted = deleted + 1; + end loop; + end if; + return new_attrib; + end if; + index = 0; + deleted = 0; + for value in select * from jsonb_array_elements(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + index := index + 1; + continue; + end if; + value2 = old_attrib[index - deleted]; + merged_json = merge_has_json(value, value2); + if merged_json is null then + old_attrib = old_attrib - (index - deleted); + deleted = deleted + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - deleted)]::text[], merged_json); + end if; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + elsif new_type = 'object' then + for key, value in Select * from jsonb_each(new_attrib) loop + if value::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - key; + continue; + end if; + merged_json = merge_has_json(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + continue; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end loop; + if old_attrib::text = '{}' then + return null; + end if; + return old_attrib; + else + if new_attrib::text = '"urn:ngsi-ld:null"' then + return null; + end if; + return new_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_vocab(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_language_map(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + index integer; + remove boolean; + value2 jsonb; + ln_found boolean; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in Select * from jsonb_array_elements(new_attrib) loop + if value ->> '@language' = '@none' and value ->> '@value' = 'urn:ngsi-ld:null' then + return null; + else + index = 0; + ln_found = false; + remove = false; + for value2 in Select * from jsonb_array_elements(old_attrib) loop + if value2 ->> '@language' = value->> '@language' then + ln_found = true; + if value ->> '@value' = 
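+				-- Language maps merge per language tag: a matching entry whose '@value' is
+				-- 'urn:ngsi-ld:null' removes that language, any other value overwrites it, and
+				-- unmatched languages are appended ('@none' plus the null URN above deletes the whole map).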
'urn:ngsi-ld:null' then + remove = true; + end if; + exit; + end if; + index = index + 1; + end loop; + if ln_found then + if remove then + old_attrib = old_attrib - index; + else + old_attrib = jsonb_set(old_attrib, ARRAY[index,'@value']::text[], value->'@value'); + end if; + else + old_attrib = old_attrib || value; + end if; + end if; + end loop; + RETURN old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_geo(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + key text; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,https://purl.org/geojson/vocab#coordinates,0,@list,0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + for value in select * from jsonb_array_elements(new_attrib) loop + PERFORM validate_geo_json(value); + end loop; + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://purl.org/geojson/vocab#coordinates' then + if value2 #>> '{0,@list,0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + elsif key = '@type' then + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + else + RAISE EXCEPTION 'Unknown type of an attribute for geojson' USING ERRCODE = 'SB003'; + end if; + end loop; + PERFORM validate_geo_json(old_attrib[(index - removed)]); + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_object_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib #> '{0,@list}'; + if old_attrib is null then + old_attrib = new_attrib; + end if; + old_value_list = old_attrib #> '{0,@list}'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed)]::text[], (old_attrib #> ARRAY[0,'@list',(index-removed)]::text[]) - key); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[0,'@list',(index-removed),key]::text[], merged_json); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB004'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; + +END; +$BODY$; + +CREATE OR 
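+-- merge_has_object: merges Relationship hasObject arrays element-wise. An '@id' of
+-- 'urn:ngsi-ld:null' drops that instance; when the array lengths differ, the new value
+-- replaces (or, with a leading null URN, deletes) the old one wholesale.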
REPLACE FUNCTION public.merge_has_object(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; +BEGIN + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@id}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@id' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - (index - removed); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[(index - removed),key]::text[], value2); + end if; + else + RAISE EXCEPTION 'Unknown type of an attribute for relationship' USING ERRCODE = 'SB003'; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value_list(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + new_value_list jsonb; + old_value_list jsonb; + index integer; + removed integer; + value jsonb; + key text; + value2 jsonb; + merged_json jsonb; +BEGIN + new_value_list = new_attrib -> '@list'; + if old_attrib is null then + old_attrib := new_attrib; + end if; + old_value_list = old_attrib -> '@list'; + if jsonb_array_length(new_value_list) != jsonb_array_length(old_value_list) then + if new_value_list #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_value_list) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], value2); + end if; + elsif key = '@list' then + merged_json = merge_has_value_list(value, old_value_list[index - removed]); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + + else + merged_json = merge_has_value(value2, old_value_list[index - removed] -> key); + if merged_json is null then + old_attrib = jsonb_set(old_attrib, ARRAY['@list']::text[], (old_attrib #> ARRAY['@list']::text[]) - (index-removed)); + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY['@list',(index-removed),key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_value_list) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_has_value(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + value jsonb; + value2 jsonb; + merged_json jsonb; + index integer; + removed integer; + arr_idx integer; + 
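+	-- merge_has_value merges Property hasValue arrays positionally: an instance whose '@value'
+	-- is 'urn:ngsi-ld:null' is removed, nested objects are merged recursively, and an empty
+	-- result deletes the value altogether. E.g. merging [{"@value": "urn:ngsi-ld:null"}] into a
+	-- single existing instance returns NULL, i.e. the attribute value is dropped.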
key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + if jsonb_array_length(new_attrib) != jsonb_array_length(old_attrib) then + if new_attrib #>> '{0,@value}' = 'urn:ngsi-ld:null' then + return null; + else + return new_attrib; + end if; + else + index := 0; + removed := 0; + for value in select * from jsonb_array_elements(new_attrib) loop + for key, value2 in select * from jsonb_each(value) loop + if key = '@value' then + arr_idx := index - removed; + if value2::text = '"urn:ngsi-ld:null"' then + old_attrib = old_attrib - arr_idx; + removed := removed + 1; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], value2); + end if; + else + arr_idx := index - removed; + merged_json = merge_has_value(value2, old_attrib #> ARRAY[arr_idx,key]::text[]); + if merged_json is null then + old_attrib[arr_idx] = old_attrib[arr_idx] - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[arr_idx,key]::text[], merged_json); + end if; + end if; + end loop; + index := index + 1; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return null; + end if; + return old_attrib; + end if; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_ATTRIB_INSTANCE(IN NEW_ATTRIB JSONB, + + IN OLD_ATTRIB JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL SAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + attrib_type TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + key text; +BEGIN + if old_attrib is null then + old_attrib := new_attrib; + end if; + new_attrib := new_attrib - 'https://uri.etsi.org/ngsi-ld/createdAt'; + attrib_type := old_attrib #>> '{@type,0}'; + if attrib_type != new_attrib #>> '{@type,0}' then + RAISE EXCEPTION 'Cannot change type of an attribute' USING ERRCODE = 'SB001'; + end if; + if attrib_type = 'https://uri.etsi.org/ngsi-ld/Property' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/Relationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObject' then + merged_json = merge_has_object(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = 
jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValueList' then + merged_json = merge_has_value_list(value[0], old_attrib #> '{key,0}'); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/unitCode' then + if value #>> '{0,@value}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/ListRelationship' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectType' then + if value #>> '{0,@id}' = 'urn:ngsi-ld:null' then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + end if; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasObjectList' then + merged_json = merge_has_object_list(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/GeoProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasValue' then + merged_json = merge_has_value_geo(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], 
merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/LanguageProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasLanguageMap' then + merged_json = merge_has_language_map(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/VocabProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasVocab' then + merged_json = merge_has_vocab(value, old_attrib -> key); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + elsif attrib_type = 'https://uri.etsi.org/ngsi-ld/JsonProperty' then + for key, value in SELECT * FROM JSONB_EACH(new_attrib) loop + if key = '@type' or key = 'https://uri.etsi.org/ngsi-ld/datasetId' then + continue; + elsif key = 'https://uri.etsi.org/ngsi-ld/hasJSON' then + merged_json = merge_has_json(value #> ARRAY[0,'@value']::text[], old_attrib #> ARRAY[key,0,'@value']::text[]); + if merged_json is null then + return null; + end if; + old_attrib = jsonb_set(old_attrib, ARRAY[key,0,'@value']::text[], merged_json); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' or key = 'https://uri.etsi.org/ngsi-ld/observedAt' then + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], value); + else + merged_json = merge_attrib(value, old_attrib -> key); + if merged_json is null then + old_attrib = old_attrib - key; + else + old_attrib = jsonb_set(old_attrib, ARRAY[key]::text[], merged_json); + end if; + end if; + end loop; + else + RAISE EXCEPTION 'Unknown type of an attribute %, %, %', attrib_type, old_attrib, new_attrib USING ERRCODE = 'SB002'; + end if; + return old_attrib; +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Cannot invalid structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for 
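+	-- Match every incoming attribute instance to an existing one by datasetId (absent matches
+	-- absent). Merged instances are re-appended to old_attrib; removed datasetIds are collected
+	-- in 'deleted', new or changed ones in 'updated' ('null' stands for the default instance).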
value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + else + if new_dataset_id is null then + deleted := deleted || 'null'; + else + deleted := deleted || new_dataset_id; + end if; + end if; + else + if new_dataset_id is null then + updated := updated || 'null'; + else + updated := updated || new_dataset_id; + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION PUBLIC.MERGE_JSON(IN A text,IN B JSONB) RETURNS JSONB LANGUAGE 'plpgsql' VOLATILE PARALLEL UNSAFE COST 100 AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + deleted JSONB; + updated JSONB; +BEGIN + +Select entity into previous_entity from entity where id =a; +if previous_entity is null then + RAISE EXCEPTION 'Entity not found.' USING ERRCODE = '02000'; +end if; +Select entity into merged_json from entity where id =a; +deleted := '{}'; +updated := '{}'; +-- Iterate through keys in JSON B +FOR key, value IN SELECT * FROM JSONB_EACH(b) +LOOP + if key = '@id' or key = 'https://uri.etsi.org/ngsi-ld/createdAt'then + continue; + elsif key = '@type' then + value2 = merged_json -> key; + WITH combined AS ( + SELECT jsonb_array_elements(value) AS elem + UNION + SELECT jsonb_array_elements(value2) AS elem + ) + SELECT jsonb_agg(elem) into value2 AS merged_array FROM combined; + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2); + elsif key = 'https://uri.etsi.org/ngsi-ld/modifiedAt' then + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value); + else + value2 = merged_json -> key; + value2 = merge_attrib(value, value2); + if value2 ->'result' = 'null'::jsonb or jsonb_array_length(value2 ->'result') = 0 then + merged_json = merged_json - key; + deleted = jsonb_set(deleted, ARRAY[key]::text[], '["@all"]'::jsonb); + else + merged_json = jsonb_set(merged_json, ARRAY[key]::text[], value2 -> 'result'); + if jsonb_array_length(value2 -> 'deleted') != 0 then + if deleted ? key then + deleted = jsonb_set(deleted, ARRAY[key], ((deleted -> key) || (value2 -> 'deleted'))); + else + deleted = jsonb_set(deleted, ARRAY[key], ((value2 -> 'deleted'))); + end if; + end if; + + if jsonb_array_length(value2 -> 'updated') != 0 then + if updated ? 
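+			-- 'deleted' and 'updated' are objects keyed by attribute name, each listing the
+			-- affected datasetIds ('null' for the default instance, '@all' when the whole
+			-- attribute was removed), so the caller can tell what the merge changed.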
key then + updated = jsonb_set(updated, ARRAY[key], ((updated -> key) || (value2 -> 'updated'))); + else + updated = jsonb_set(updated, ARRAY[key], ((value2 -> 'updated'))); + end if; + end if; + + end if; + + + end if; +END LOOP; +update entity set entity = merged_json, e_types = ARRAY(SELECT jsonb_array_elements_text(merged_json->'@type')) where id = a; + +RETURN jsonb_build_object('old', previous_entity, 'new', merged_json, 'deleted', deleted, 'updated', updated); +END; +$BODY$; + +CREATE OR REPLACE FUNCTION public.merge_json_batch(IN b jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL UNSAFE + COST 100 + +AS $BODY$ +DECLARE + merged_json JSONB; + key TEXT; + value JSONB; + value2 JSONB; + previous_entity JSONB; + ret JSONB; + newentity jsonb; + resultObj jsonb; + entityId text; + index integer; +BEGIN + resultObj := '{"success": [], "failure": []}'::jsonb; + index := 0; + FOR newentity IN SELECT jsonb_array_elements(b) LOOP + entityId := newentity->>'@id'; + IF entityId is null then + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_build_object('no id row nr ' || index, 'No entity id provided')); + else + BEGIN + ret := MERGE_JSON(entityId, newentity); + resultObj = jsonb_set(resultObj, '{success}', resultObj -> 'success' || jsonb_build_object('id', entityId, 'old', ret -> 'old', 'new', ret -> 'new', 'deleted', ret -> 'deleted', 'updated', ret -> 'updated')::jsonb); + EXCEPTION + WHEN OTHERS THEN + RAISE NOTICE '%, %', SQLSTATE, SQLERRM; + resultObj = jsonb_set(resultObj, '{failure}', resultObj -> 'failure' || jsonb_object_agg(entityId, SQLSTATE)); + END; + end if; + index := index + 1; + END LOOP; + RETURN resultObj; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240905.1__validategeojsonfix.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240905.1__validategeojsonfix.sql new file mode 100644 index 0000000000000000000000000000000000000000..f9eea8fca78af88cd9cca10817372067d2fec0e3 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240905.1__validategeojsonfix.sql @@ -0,0 +1,69 @@ +CREATE OR REPLACE FUNCTION public.validate_geo_json(IN geo_json_entry jsonb) + RETURNS void + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 + +AS $BODY$ +DECLARE + geo_type text; + value jsonb; + value2 jsonb; +BEGIN + geo_type = geo_json_entry #>> '{@type,0}'; + if geo_type = 'https://purl.org/geojson/vocab#Point' then + PERFORM validate_geo_point(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}'); + elsif geo_type = 'https://purl.org/geojson/vocab#LineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#Polygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPoint' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi point update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + elsif geo_type = 'https://purl.org/geojson/vocab#MultiLineString' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi line string update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + PERFORM validate_geo_point(value); + end loop; + end loop; + + elsif geo_type = 'https://purl.org/geojson/vocab#MultiPolygon' then + if not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? '@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value in select * from jsonb_array_elements(geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list}') loop + if not value #> '{https://purl.org/geojson/vocab#coordinates,0}' ? '@list' or not geo_json_entry #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0}' ? 
'@list' then + RAISE EXCEPTION 'Invalid multi polygon update for geo json' USING ERRCODE = 'SB006'; + end if; + for value2 in select * from jsonb_array_elements(value #> '{https://purl.org/geojson/vocab#coordinates,0,@list,0,@list}') loop + PERFORM validate_geo_point(value2); + end loop; + end loop; + else + RAISE EXCEPTION 'Invalid geo json type' USING ERRCODE = 'SB007'; + end if; +RETURN; +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/db/migration/V20240922.1__mergeattrib.sql b/scorpio-broker/at-context-server/target/classes/db/migration/V20240922.1__mergeattrib.sql new file mode 100644 index 0000000000000000000000000000000000000000..98411df52c5cfd8208b71983d6624d4bfd7452b9 --- /dev/null +++ b/scorpio-broker/at-context-server/target/classes/db/migration/V20240922.1__mergeattrib.sql @@ -0,0 +1,71 @@ +CREATE OR REPLACE FUNCTION public.merge_attrib(IN new_attrib jsonb,IN old_attrib jsonb) + RETURNS jsonb + LANGUAGE 'plpgsql' + VOLATILE + PARALLEL SAFE + COST 100 +AS $BODY$ +DECLARE + merged_json JSONB; + value JSONB; + value2 JSONB; + new_dataset_id TEXT; + old_dataset_id TEXT; + index INTEGER; + found boolean; + deleted jsonb; + updated jsonb; + tmp jsonb; +BEGIN + deleted := '[]'::jsonb; + updated := '[]'::jsonb; + if jsonb_typeof(new_attrib) != 'array' then + RAISE EXCEPTION 'Invalid attribute structure' USING ERRCODE = 'SB002'; + end if; + if old_attrib is null then + old_attrib := new_attrib; + end if; + for value in select * from jsonb_array_elements(new_attrib) loop + new_dataset_id = value #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + index := 0; + found := false; + for value2 in select * from jsonb_array_elements(old_attrib) loop + old_dataset_id = value2 #>> '{https://uri.etsi.org/ngsi-ld/datasetId,0,@id}'; + if (old_dataset_id is null and new_dataset_id is null) or (old_dataset_id is not null and new_dataset_id is not null and old_dataset_id = new_dataset_id) then + found := true; + merged_json = merge_attrib_instance(value, value2); + EXIT; + end if; + index := index + 1; + end loop; + if found then + old_attrib = old_attrib - index; + if merged_json is not null then + old_attrib = old_attrib || merged_json; + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + else + if new_dataset_id is null then + deleted := deleted || ('null'::jsonb); + else + deleted := deleted || to_jsonb(new_dataset_id); + end if; + end if; + else + if new_dataset_id is null then + updated := updated || ('null'::jsonb); + else + updated := updated || to_jsonb(new_dataset_id); + end if; + old_attrib = old_attrib || value; + end if; + end loop; + if jsonb_array_length(old_attrib) = 0 then + return jsonb_build_object('result', 'null'::jsonb, 'deleted', deleted, 'updated', updated); + end if; +return jsonb_build_object('result', old_attrib, 'deleted', deleted, 'updated', updated); +END; +$BODY$; \ No newline at end of file diff --git a/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/cache/ContextCache.class b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/cache/ContextCache.class new file mode 100644 index 0000000000000000000000000000000000000000..fa91ab0f0c65b4b72aff86aa3a11f3c6eae69ca2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/cache/ContextCache.class differ
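The migration functions above can be exercised directly from psql. The calls below are a minimal sketch, assuming the migrations are applied to a database that also provides the merge_json, merge_attrib_instance and validate_geo_point helpers they reference; the NGSI-LD type and payloads are hypothetical examples, not data taken from this change.

-- A batch entry without an '@id' never reaches merge_json; it is recorded under "failure".
SELECT public.merge_json_batch(
    '[{"@type": ["https://uri.etsi.org/ngsi-ld/default-context/Building"]}]'::jsonb
);
-- "success" stays [], "failure" gains [{"no id row nr 0": "No entity id provided"}].
-- Entries that do carry an '@id' are merged through merge_json and reported under
-- "success" with their old and new state plus the deleted and updated keys it returns.

-- A geometry type outside the supported GeoJSON vocabulary is rejected with ERRCODE SB007.
SELECT public.validate_geo_json('{"@type": ["https://purl.org/geojson/vocab#Circle"]}'::jsonb);
-- raises: Invalid geo json type

diff --git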
a/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/controller/ContextController.class b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/controller/ContextController.class new file mode 100644 index 0000000000000000000000000000000000000000..71dce1a0884b9b7dbb90b81a629d5348624b4517 Binary files /dev/null and b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/controller/ContextController.class differ diff --git a/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/dao/ContextDao.class b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/dao/ContextDao.class new file mode 100644 index 0000000000000000000000000000000000000000..a7e010c4b81e9775e8ad4c0abd965d61d38f0c36 Binary files /dev/null and b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/dao/ContextDao.class differ diff --git a/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/service/ContextService.class b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/service/ContextService.class new file mode 100644 index 0000000000000000000000000000000000000000..9e2f461ba7f20dadeed7854baa25e340954f59eb Binary files /dev/null and b/scorpio-broker/at-context-server/target/classes/eu/neclab/ngsildbroker/atcontextserver/service/ContextService.class differ diff --git a/scorpio-broker/at-context-server/target/maven-archiver/pom.properties b/scorpio-broker/at-context-server/target/maven-archiver/pom.properties new file mode 100644 index 0000000000000000000000000000000000000000..7b461f6cc2379c2ca457f829a5a63708a615097b --- /dev/null +++ b/scorpio-broker/at-context-server/target/maven-archiver/pom.properties @@ -0,0 +1,5 @@ +#Generated by Maven +#Fri Jan 03 03:27:19 UTC 2025 +groupId=eu.neclab.ngsildbroker +artifactId=at-context-server +version=5.0.5-SNAPSHOT diff --git a/scorpio-broker/at-context-server/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst b/scorpio-broker/at-context-server/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..42ac59aaa8c39348ed5f08c2252e63090d6ec730 --- /dev/null +++ b/scorpio-broker/at-context-server/target/maven-status/maven-compiler-plugin/compile/default-compile/createdFiles.lst @@ -0,0 +1,4 @@ +eu/neclab/ngsildbroker/atcontextserver/dao/ContextDao.class +eu/neclab/ngsildbroker/atcontextserver/service/ContextService.class +eu/neclab/ngsildbroker/atcontextserver/cache/ContextCache.class +eu/neclab/ngsildbroker/atcontextserver/controller/ContextController.class diff --git a/scorpio-broker/at-context-server/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst b/scorpio-broker/at-context-server/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst new file mode 100644 index 0000000000000000000000000000000000000000..f47bf1e916729019f8ce3717c1320d552de827c0 --- /dev/null +++ b/scorpio-broker/at-context-server/target/maven-status/maven-compiler-plugin/compile/default-compile/inputFiles.lst @@ -0,0 +1,4 @@ +/root/scorpio/ScorpioBroker2/ScorpioBroker/at-context-server/src/main/java/eu/neclab/ngsildbroker/atcontextserver/cache/ContextCache.java 
+/root/scorpio/ScorpioBroker2/ScorpioBroker/at-context-server/src/main/java/eu/neclab/ngsildbroker/atcontextserver/controller/ContextController.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/at-context-server/src/main/java/eu/neclab/ngsildbroker/atcontextserver/dao/ContextDao.java +/root/scorpio/ScorpioBroker2/ScorpioBroker/at-context-server/src/main/java/eu/neclab/ngsildbroker/atcontextserver/service/ContextService.java diff --git a/scorpio-broker/at-context-server/target/quarkus-app/app/at-context-server-5.0.5-SNAPSHOT.jar b/scorpio-broker/at-context-server/target/quarkus-app/app/at-context-server-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..0c93b013750ecf5ee48b7bfd77154efb0d754b25 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/app/at-context-server-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..9d5d452f4a66d1165b27d4d604d2bddaad0e4cd7 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.github.crac.org-crac-0.1.3.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d9c956f0001cff936eb20c373592dcc6510f5ff Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-bootstrap-runner-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f9f583af71764a7127e4da73677ed5c8dea97bfb Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-classloader-commons-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0447153ded16f86775cdcb337f8576579c1ac0db Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-development-mode-spi-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..243fa559c2be0f61720e1a333e43d4da2e8b5516 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.quarkus.quarkus-vertx-latebound-mdc-provider-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ce4cbac674f51eb2063ff475a0e70484b25ace9f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-constraint-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..55063658b2d25baf50b6a3963c508233f695b3de Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-cpu-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..4a7a8e4b537b532f4f58717f3366e9cb6f0ff0ce Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-expression-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b320c9d42b93b26981927265e5dc62b85e73263f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-function-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d2520242b28d239ba3b138c17e65f8ae6103a787 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-io-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..0648aa9f6a2ea3b3ddd083471f1c8dacbb4bbc4f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-net-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..134f669d9fbee6ca61a9c9bb36227376ed97d0a2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-os-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3365ab16bb0cd576f88b808e9af9b024111a5070 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/io.smallrye.common.smallrye-common-ref-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..778ea557232b0fb41df34d63353c219a371660b2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/org.jboss.logging.jboss-logging-3.6.0.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fd5c901f4bb0e8ed59d2d040740021a7c5cf1b19 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/boot/org.jboss.logmanager.jboss-logmanager-3.0.6.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f74846f3f35230a9e14c0bf98e4cccfec593b4c9 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.brotli4j-1.16.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ee6d31a6a025d92f9fb05550483fb6b415b7f066 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.native-linux-x86_64-1.16.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..52835b2540d6cb8aaffe22bd5c7c24203cc77538 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.aayushatharva.brotli4j.service-1.16.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..dba6fdd40e73a6dfc06a462bc2646c2bfb7e6d5a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.cronutils.cron-utils-9.2.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar new 
file mode 100644 index 0000000000000000000000000000000000000000..c13bcb9104e907195d13bbb4f998c1e5594cc2e8 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-annotations-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..34be9026612b9553f55f5f1aed148fc96a9d8fcb Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-core-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3750b8c1cfae96e79305618c78653ac5fb9b6de5 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.core.jackson-databind-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..327ee706dcf46e428dd6339b9744e77941e5b498 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-toml-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c47febca79456ba4d389bbb46ea0e11e6a41bede Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.dataformat.jackson-dataformat-yaml-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6ff58aed923740c9a4f639b9a512dcfd08df921 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jdk8-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..3aa01f1ee73130e4983d2c3520220b29995c4ccc Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.datatype.jackson-datatype-jsr310-2.17.2.jar differ diff --git 
a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..288bf56e1b4f5c5a2bb2152887c5ef12e6cddeae Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.fasterxml.jackson.module.jackson-module-parameter-names-2.17.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..f4f1af783043658e2b3879560b6e1ff0b8db66a1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.ben-manes.caffeine.caffeine-3.1.5.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..85cefa6d7b5b644fb99075f6621ca60beb350cd9 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.filosganga.geogson-core-1.2.21.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar new file mode 100644 index 0000000000000000000000000000000000000000..c4b2b999ce72dcfdd9b63d6a62d0a2d7e2037034 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.filosganga.geogson-jts-1.2.21.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1d6d1c50a344b45ba375d53775b70ad5aac58f7 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.github.luben.zstd-jni-1.5.6-3.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..18e59c8c41de73e02e77298e981fa7e3051e4b5d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.code.gson.gson-2.11.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a8f5dfe1b83122a9f085da1aa7fff451ed88e783 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.errorprone.error_prone_annotations-2.30.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b56dc751c1cc7dff75ed80ccbb45f027058e8ce Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.guava.failureaccess-1.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar new file mode 100644 index 0000000000000000000000000000000000000000..10d10b62a49ad095f56d620620ee7eaa5d2fc62d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.guava.guava-33.2.1-jre.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..3595c4f9be5c0ce779f8dd611e7f6917ca518f5d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.google.j2objc.j2objc-annotations-2.8.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a3336373b7aea1700b62d9aa60a15493586c3e8a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.scram.client-2.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..71079166b7bc51455b1e1d18ea4e5e942b3ae89f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.scram.common-2.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..cbb633729cae09e5d65aefccd7b63c697f42b5cb Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.stringprep.saslprep-1.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eecfb70406fbaca61c7c9e5a549f77cbef2e849b Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.ongres.stringprep.stringprep-1.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a690bed6986df8a510ee4f05b2079264db7d71af Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/com.vividsolutions.jts-core-1.14.0.jar 
differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5023670d73e75c539b0af285d35c4e9edaef2211 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/commons-codec.commons-codec-1.17.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..eb3c2b0b82115e9820f781e944312b4c19b25ed4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/commons-io.commons-io-2.16.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar new file mode 100644 index 0000000000000000000000000000000000000000..705f285c9348d57ec059c73b90ed9836f4db6aa4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/eu.neclab.ngsildbroker.commons-5.0.5-SNAPSHOT.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..684cc24821451b65b9fc36376131490a9d03b37c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-api-2.5.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..36f71a550601f4842536ad1a62a187a30c60eb69 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-narayana-2.5.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..b4f917b9687dc231c8f3b4cf3fc0a95e616846ea Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.agroal.agroal-pool-2.5.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..878cc677337985f59ed9f4bb5cfcdb8ca4d0acbe Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-buffer-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..9afa6d70ae20b7082f786920e918fd70c138a5b3 Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b626ceafb52c318581529fafbd1d33889f25c20 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..043052d031f59a3b289cc2bb7dda9b8352c58e11 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-haproxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..316bdec6ab1a6ea4cd4dc33c9217cdf96e4c9049 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-http-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..73e95705d3152472d11fa2a5690626b652ff280a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-http2-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..7192fa0e49a0762ad44c218215e6197dd12197b2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-mqtt-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..4ecfb5dbec2f25d201de0a83d1143729830d49bd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-codec-socks-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..072d06d46d38bcb7a63efb38075bf79ea111caf1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-common-4.1.111.Final.jar differ diff --git 
a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e19ecdbd4d53bc0c09246f73926aaae70fe9493 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-handler-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..08c1d314876730dc6f82ba65e741f03b8719609d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-handler-proxy-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..3427ee30e0e35e876eda5e5f1bed695d8c2636e9 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-resolver-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5facdd8e2f6e4f4fa3ceecc16962b3b15b9627c2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-resolver-dns-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..575355b83ce351f70a605a46ef5fa7f75f956835 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..39f5787fb3d458de0fd9f575345176b60e050961 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-classes-epoll-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1e0f39535831b6f914098b776513c284c50b351 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-classes-kqueue-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..9637836909b5e59f7a83f88412a8571b4641f56b Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-epoll-4.1.111.Final-linux-x86_64.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar new file mode 100644 index 0000000000000000000000000000000000000000..93bc0ad010aab29e0a328d2ffafaeb60c82b3ecf Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-kqueue-4.1.111.Final-osx-x86_64.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..f06de9ed2afaa3668bccce11dcfe8924446817d0 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.netty.netty-transport-native-unix-common-4.1.111.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9f5b95529a5c7ac6044ffa4c60e6d412a1597104 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..9edc36a8a46a29b0e5a507fbd092a0e6629b8db7 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.instrumentation.opentelemetry-instrumentation-api-incubator-2.5.0-alpha.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fe2973fcc47beca49e8e4061fe60a78c2a27102 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-1.39.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..0aedb22b8903d6eb70232c1e310266e0465b5f54 Binary files 
/dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-api-incubator-1.39.0-alpha.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..084d172fc3512a05145851e902ab1377a9d8b814 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.opentelemetry-context-1.39.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar new file mode 100644 index 0000000000000000000000000000000000000000..b740a4a827c0808baeda7112a5c40158e4eba664 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.opentelemetry.semconv.opentelemetry-semconv-1.26.0-alpha.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f99a4fc63360f9f5d1f0d751928b0b93d62ac81 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkiverse.loggingmanager.quarkus-logging-manager-3.1.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..75e3104ca9d784fa7fcaca27ecd239c02ac6e241 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkiverse.systemd.notify.quarkus-systemd-notify-1.0.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3eab7c30fefb9c141226da5595a5faf9ff83d462 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.arc.arc-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..196cc61fca2b75593a6ac4cd38744fb6fcdc33bd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-agroal-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..070baec335bcc35a79bd1ddf7ee52e809331c4c3 Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-arc-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..80221e424d28060c8620964fef488107c54f4e94 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c607cbdf717926be37b821d11c9c4b0ed9a2802 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-cache-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..17262bba0ff1a57d9e893d832f8268389adfe340 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-caffeine-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..30ef9922f2c8fe0c6a28cd108f33f9632fe82411 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b3a47cef61d53f9997148c1502dbb4ec313a6a47 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5b530ad1e484936df857104583ffcaf78b1715e3 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-container-image-docker-common-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..96950282b9778bf3a3bd26d4c9125ffdc0f95c96 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-core-3.14.1.jar differ diff --git 
a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b447f8aeb87c42e4489c0db15f30f93d4707fa45 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-credentials-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..11a94c16234d6e460c6e66a840ac7976e756c458 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d325421d690f179c4626844df98915a36e74ea Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-datasource-common-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0089e0cb796e6183029df4847b1b600d974a8b1a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..6abfe2c3ec3923ddea06c4e6f4a4a440d72fa649 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-flyway-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar new file mode 100644 index 0000000000000000000000000000000000000000..99c263dc3f71a215a4a85901538f1dedacb51acf Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-fs-util-0.0.10.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d9dbbe1b73c8f5435b7309f77520f6cbff82600c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-info-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..5d3b9086c238e3c3dbb2af68ce47f52eb269c664 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-info-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c03be12b0b2c7549c2db53ed275e45cd87e8a4dc Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jackson-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..0bf27883a28a42e50d17bca78ce368d79acaea13 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jdbc-postgresql-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1f440dcb8f735c055768cd95c2bc4457c4cbbb48 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-jsonp-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e7adeb414fcb6108390beabc72d94e7f933e055d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-kafka-client-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fba12107dee341bd5894a0e05deb09280561e517 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..48ec53f0dba9ea734d83d21f7d793bb2178bb8d1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kafka-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff161c12c4949a473a1d2ed1f597d6338337a321 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-messaging-kotlin-3.14.1.jar differ diff --git 
a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..577b2bb43f4750ecb4f5c1b9ba952d3866bcea22 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..10a28407ba7c16165164c35be0f9657812349a84 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-mutiny-reactive-streams-operators-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2d0ee5b30dbe7a8cb00c15600142ef6041a97f44 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-narayana-jta-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e3c7750dd2c267fea46f510636ae576bf26894a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-netty-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a38e1850efbb73aaf248318253b11e0d0a64a50e Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-datasource-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9e257c119006bed0c77c8b8a6b2e05adfc71dde6 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-reactive-pg-client-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a85e9e76c2870fca83485b31334a81a02f12781 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..548a6405f93dd3254c1123ecbcce2f96f6995e30 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-common-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f65840267108fe2b704976f14e3c17bd02290e20 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..68b053f16299ba0a9bcd80c6e795c2c9e0b186a1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-rest-jackson-common-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..89aaf8be3250033348a32ac75c1edc52d9d264ac Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..add785757c2a958f86e3a20c6034d46a38427d83 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-api-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..e6f98b3a199f27f5c5ab47000862c0558116f3ef Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-common-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa88a2ec11f998225646b08795fedc215e530e6f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-kotlin-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar new file mode 100644 index 
0000000000000000000000000000000000000000..cfd8c45d73d6d6e8da9c8fd24e04b2dcb86bff83 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-scheduler-spi-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6438a41b88c1605a994451cda5657f0a52669ad Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-security-runtime-spi-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4bb4f9457907978ea011d1fc390c6ab2e6217888 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-context-propagation-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..494d0bb23d9ae6619c2a8f2e18ec458676f39316 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-health-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8ec4c28f2f004dc6d7b8ca0d36eeaa4b79f4a4bd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-metrics-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d5bfe9c8b3c66f75327e6ce621da822c6b67fc Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-smallrye-openapi-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..90607ec6492a8667ea6d2c39f77389e4083a2017 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-swagger-ui-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8f8612553283b7f00d9dc7ab7eb5313b2f1eca2 Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-tls-registry-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..9b0c51a8f29ef87a1c6390dd98c2e1e6aa0114f7 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-transaction-annotations-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..1dcea88498ba439a3ac32e291067331c99331ace Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..92721ae048041c7dbddda0c88d417d740f3c299d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-vertx-http-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..3199cb971aa88ab386df667ad1d16d74d3bee22f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.quarkus-virtual-threads-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c0ebec7fa19a8e96489714a2f9986017522becfb Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..4696de7fd6cd077015cad5212b223de1dcc80307 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..04bc9cc78782c3b7bc78553481b94a10a02f8f9d Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-common-types-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..14d1445dba365c803f94f24f6fcb8467d13cf6e5 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-jackson-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5859e692a03f1173aa1f2b272a844c94d24c12e3 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.resteasy.reactive.resteasy-reactive-vertx-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c8cb76d3f0d9c0853168e740754f4d79042a105c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.security.quarkus-security-2.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b038ad4a83ac6df0e0a8f71265877dad2ded43ab Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.quarkus.vertx.utils.quarkus-vertx-utils-3.14.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c63764d56c7a95f9abf5230b80fae1a414651b61 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-annotation-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df5d82b54357c697c0d95fd9b8db277facfe0cde Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-classloader-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..07d3721a9c5c5e314e5eee21ec168139bcfed6c4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.common.smallrye-common-vertx-context-2.5.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8f7f3668d903b867c021d47fd08c02c45374e29c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-3.9.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a774ecd7174486d0c6d077c388f1021c3b2ef05 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-common-3.9.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f90f28b0d6afa7518cd3095f64d8dbd9fd56b77d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.config.smallrye-config-core-3.9.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ba5add79fc5dee32ae8d3dfc0ffeebe75541b603 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.jandex-3.2.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..d32bd30fa7bfa74da1330810bc3eb3c07f174385 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-2.6.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..cc828adf40879d957e266f0b781f7bb3d864af07 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-reactive-streams-operators-2.6.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..08a59e5bb2d5572b0fdbe693f48a3f0a5073d4f0 Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-smallrye-context-propagation-2.6.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b821617242260f35360fb4247a01d66dea563004 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-1.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9908f1069718a85031db676224857e26dc7401f0 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.mutiny-zero-flow-adapters-1.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d09f435a3cd1ae0395b4926c78311be6b276e0c2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-auth-common-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9bf5d12b59534df460f6ccb01dad8c8ffcd8a542 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-bridge-common-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3c5639431a5d51ef16ef94d82225effb271e8467 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-core-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d55e5f07ddae6c98b08fbff4848461fdbb2adf73 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-mqtt-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..760d92fc11fe3672030cdd7c71416ad75b8ac775 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-pg-client-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7504902f7621ee1764e207e2f5b2ffdeaa22e261 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-runtime-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..11867e294ef35ca6e9a6a82fb52dd52821c35d48 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-sql-client-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e268ff4ef4543b76568d66444df43ce45751d3bc Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-uri-template-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..662cf14d5c427f09ea259c484befd4cc2d328b2d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..417d8b59b8ce5998e21e0275beb878ac50008bd8 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-client-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d3ab26e2476843ebb0d27c983443ff9a7ba3fee8 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-mutiny-vertx-web-common-3.14.0.jar differ diff --git 
a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f08a939bd2b90b9f87dc3f15ab88e15e8c48087e Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-api-3.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..abb08bb751b0e037476fc15d4da40a44d5432966 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-converter-mutiny-3.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57bc48ad35c44ab05ee4f8575f552ea0dd3b04c2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-api-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..98d6fa439ad69c1dc9436d76d43e3a34636729ad Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-camel-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..373471c5b69b1468fff9a50d614c1b84fa69fbdd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-health-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..ff59cf0f6f80ce2f55e1f08afbd1750c7df6e2fc Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-in-memory-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..49443391feae0e4b18c570a3a007db6187108031 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..723430b5a2624a415e2754bad5f69fd151e71dd3 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-kafka-api-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..c6169a6b468ab05de2c7962fc64ce8567f46c444 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-otel-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b7cc8f08c3aa4845416101f90521f8736eef6973 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.smallrye-reactive-messaging-provider-4.24.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f1d68c985deb0d3b5c865a91ca3489e94d623dcf Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.reactive.vertx-mutiny-generator-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..99dc47ca15667630606af6d41f5e616acd66b168 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-2.1.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..c7f07c892fbe634044ba8b8333aac2195d84a4b0 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-api-2.1.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..97a49b08f496d01d6a50e3b2455476c5b23ba50d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-jta-2.1.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..ece3eb2d02159abb38ed4ea6b8fc4b599616c79e Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-context-propagation-storage-2.1.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..9ed9c88db7be8648c5b501b787a1addf9f0c1981 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-fault-tolerance-vertx-6.4.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b8cc43f8b9896a24d916d7bfdbdd02e396d7db1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-4.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b232bcabeac47c30ac02371188d10bdb5d214a88 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-api-4.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..8701dd9d98c6c7d61ea547823759f909474f7a9d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-health-provided-checks-4.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..473ce54348410627710157274dd8d2fdfa39c0df Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-metrics-4.0.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..16255cf4093cd574a00574f812ee5109478fe9a4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.smallrye.smallrye-open-api-core-3.10.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c3c712e90ce6ebdae145eec147d6d4a50bd0fe53 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-auth-common-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..354030f4393f7f920b6e03ff894d8e5ea8727797 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-bridge-common-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..284a20484992c849fd9a5fb0f83f8f639f5d562f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-codegen-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..467756a558a61103b2dc767e0833e561540ed8fd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-core-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..477c706ef15c0f6622b55e64d3cb623cb3fd3ebd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-mqtt-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..305727ab77b9dcd6df44f8868efc6859b51a419e Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-pg-client-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8c00aa871496b3293e1c008449162d449b9f64ea Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-sql-client-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..8a4fe6ed7c025ce502f81bbee92b36230457d647 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-uri-template-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..c322385c3404a0196e8d509f2f7cb892fcb0883a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..2e158629507bc39be5c13b1b31fa3563e0ac44b1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-client-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar new file mode 100644 index 0000000000000000000000000000000000000000..b16bdbbc285bd848e6dac42208acfa3d6bfa6fab Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/io.vertx.vertx-web-common-4.5.9.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..0d015d54205c084ad31609cc2909853fee83476a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.activation.jakarta.activation-api-2.1.3.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..34c1d4394b358572a2c79b543cb7d094b0ba1b8d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.annotation.jakarta.annotation-api-3.0.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..316080f3b56134e8a821a50511b0e831886c9184 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.el.jakarta.el-api-5.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5edfd71412f1470c93366d821b353ebaa85ecac3 Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.cdi-api-4.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a3214715591141e2d6fbb0f5b71f52126a571a5 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.enterprise.jakarta.enterprise.lang-model-4.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..a92e099d4fc25523e2830fa9b8181d319c9369a7 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.inject.jakarta.inject-api-2.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..3a5b5b5fc36bb88093fd25a30b2d1d7fbe9e3cba Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.interceptor.jakarta.interceptor-api-2.2.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..dbcbea90f1434f04d12c2039f9213c704d82ec31 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.json.jakarta.json-api-2.1.3.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..5a5d9089734b0a7061dc14c4afc35884cc507636 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.resource.jakarta.resource-api-2.1.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..b1e7da4be43dd1a10393608d1aff9c7a87460461 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.transaction.jakarta.transaction-api-2.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..80670a1b87a7680fbac8c690d599361f8dd8d2ea Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.ws.rs.jakarta.ws.rs-api-3.1.0.jar 
differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..48242829bc38133a9cdcd36f8b2a9eebc53ab91a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/jakarta.xml.bind.jakarta.xml.bind-api-4.0.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bffe4865f5835839900292dce062bb2f24921d76 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-api-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..d8c4c56c7943f2fb3fcf2207f77f3bb7dd623550 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-base-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57b4cba6199355aa6c8fc9ad96e01f122e8613ff Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-base-engine-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..2c4fe13ea86560b5be41f0602c8cef57dac8fed8 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-componentdsl-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..46d3e9ba64ae347356342ce7bd81694068426ce4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-catalog-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..29fb4d66863e0ed67c991f16d7224d8e1b88ed98 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-engine-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..83006f2ec04934050d5d41b033761f8c6f93c80a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-languages-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..66bc6931d673d72136bf7634d9d0653576bb05de Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-model-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7fb9dadfc4843c7b3bd48544fe9c39068014ae49 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-processor-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..57eb121f34753b359ed328615b12d1335cdef5e0 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-core-reifier-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..666c3e4d030b41a58901b540d76547698cea41b4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-endpointdsl-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f5768a039af60a776a24b738659227810a5fc563 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-main-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..b2d092cdf3c4b145df42a44b536ed0f8a604574c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-management-api-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar new file mode 100644 index 
0000000000000000000000000000000000000000..08f353ce447773c14714390536f2abde814dff5b Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-microprofile-config-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..df32ff7e11ad182102a805c854bfc71cf2fec90e Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-reactive-streams-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a93a240a399ca96f7f6298a849de2fdc996f066d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-support-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..f8b05ebef8ea87d46d09eac112c1e8a5a9a549fc Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-tooling-model-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..eab628714f1e46bbabce09f59d5149071cb7b8ee Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-util-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..bcf2ba147eb2b4d9c75a637aed2c96628a8369cd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-util-json-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..e12f44ed11522b1f45d2935972336fb6852cb26d Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.camel-xml-jaxp-util-4.7.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..7b0a2bef9c92372239c8136f4bacad2e44297a95 Binary files /dev/null and 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-core-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..97e7161abe0534de78448dde1d119002283ced55 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-reactive-streams-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..a4e88280321db778bbdff6fce3aed8e6ce6e4980 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.camel.quarkus.camel-quarkus-smallrye-reactive-messaging-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..da9302ff29a560b5f10d3184f25d699fe2d9c186 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.commons.commons-lang3-3.14.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..2bb7c07363c9a44ea63fe96c827a34e296b8188c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-4.5.14.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar new file mode 100644 index 0000000000000000000000000000000000000000..9a8ac703dcd1b00c37aa6f8dc9a8a9b3d42145f6 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpclient-cache-4.5.14.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar new file mode 100644 index 0000000000000000000000000000000000000000..f0bdebeb94bce461c49ded7e28d6e6c63bf6a367 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.httpcomponents.httpcore-4.4.16.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8b3c8ff0adc42f592363a883cd691d292aada837 Binary files /dev/null 
and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.apache.kafka.kafka-clients-3.7.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa7fdabb307af8221e7e0a1526f2c97d6ba35ec4 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.checkerframework.checker-qual-3.46.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..49953e8fa25ed42f4127011561a6e84869fe5d82 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.config.microprofile-config-api-3.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar new file mode 100644 index 0000000000000000000000000000000000000000..d139727d04b6b6acdfcb520566c8c60cbbcb7fb1 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.context-propagation.microprofile-context-propagation-api-1.3.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..f6077c71e50c276649060a8fac39f6384fa67019 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.health.microprofile-health-api-4.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..d6d2e53ffaa9f0685843fd2b35fe18afd543249a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.metrics.microprofile-metrics-api-4.0.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..c9b38d9c315bae8eb8c7d4eeacb26a8f2ca16085 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.openapi.microprofile-openapi-api-3.1.1.jar differ diff --git 
a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..50933a1fdc4d2f285542845bb89f9b34cef192f2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-api-3.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..45581840b5a7e858949ee6198f8a1f7bd772fb32 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.microprofile.reactive-streams-operators.microprofile-reactive-streams-operators-core-3.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar new file mode 100644 index 0000000000000000000000000000000000000000..e3432492d5d204a1745e138497d9ede28ffb854e Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.eclipse.parsson.parsson-1.1.7.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d2ade3e92ab51059b64ce3026377e1bd5991777 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.flywaydb.flyway-core-10.17.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar new file mode 100644 index 0000000000000000000000000000000000000000..ed4bb45df0ea4a6cb61ce9c7247b6de29ab4f98a Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.flywaydb.flyway-database-postgresql-10.17.1.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..816ea17eb611606001129921b297615bcd2419fd Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.glassfish.expressly.expressly-5.0.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..ffc43704764cd535486c28401c92f2548bd52c5f Binary 
files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.invocation.jboss-invocation-2.0.0.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..0e9fcc76760c4fc3fe5fbce69a4a37783d912a11 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.jboss-transaction-spi-8.0.0.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..d7987d7c1b270f153557179abaf61c87ed62e875 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.logging.commons-logging-jboss-logging-1.0.0.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..8d218bacf88c766dba04ef14130fd7e69181ffed Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.logging.jboss-logging-annotations-3.0.1.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..5e90b7796c1a5d7264bc613050e05f20a674c010 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.narayana.jta.narayana-jta-7.0.2.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..fa3474de5d48916f2ded456deab5671a1375645b Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.narayana.jts.narayana-jts-integration-7.0.2.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..657afffffdf6324a62474b9499dd50cf9529609f Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.slf4j.slf4j-jboss-logmanager-2.0.0.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar new file mode 100644 index 
0000000000000000000000000000000000000000..26cbae47e5f0db078fc43501ed3d1b4f95a1befa Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jboss.threads.jboss-threads-3.6.1.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..2a66a92ba5247b5a1f8e201d11643e4fb11c2a8c Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.jctools.jctools-core-4.0.5.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..8da196f20fde587682295ac0c90f31ba4ab23815 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.locationtech.jts.jts-core-1.18.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..31cf0b60867242d385d764dcea99adadf7ed6ded Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.locationtech.spatial4j.spatial4j-0.8.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar new file mode 100644 index 0000000000000000000000000000000000000000..89c644b8e286e9da107d81de25f1be0fe6447607 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.lz4.lz4-java-1.8.0.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar new file mode 100644 index 0000000000000000000000000000000000000000..d530cd128ec0d314490c0e1e5ef68479cd23d366 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.noggit.noggit-0.8.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..091b4d13a417d635f5a1d7a42b482f88a6f3bf65 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.postgresql.postgresql-42.7.4.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar new file mode 100644 index 0000000000000000000000000000000000000000..e58c483f97589c9712eda2273a37e25344573390 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.reactivestreams.reactive-streams-1.0.4.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar 
b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar new file mode 100644 index 0000000000000000000000000000000000000000..a2cb8020a5afda869b487e2f9d172dcd1e9795bf Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.slf4j.slf4j-api-2.0.6.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar new file mode 100644 index 0000000000000000000000000000000000000000..511ff3211d9b29bce06c3576ddcf0139fc874bb0 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.wildfly.common.wildfly-common-1.7.0.Final.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar new file mode 100644 index 0000000000000000000000000000000000000000..7707e5878b8525da8750949186a3ab1056ecb5c5 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.xerial.snappy.snappy-java-1.1.10.5.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar new file mode 100644 index 0000000000000000000000000000000000000000..275dd5700a389ba1902a02d49e465157942368ce Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/lib/main/org.yaml.snakeyaml-2.2.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/quarkus-app-dependencies.txt b/scorpio-broker/at-context-server/target/quarkus-app/quarkus-app-dependencies.txt new file mode 100644 index 0000000000000000000000000000000000000000..0aa0b1a4e39e9ac14c9739186a382f5a7784a7d6 --- /dev/null +++ b/scorpio-broker/at-context-server/target/quarkus-app/quarkus-app-dependencies.txt @@ -0,0 +1,258 @@ +com.aayushatharva.brotli4j:brotli4j::jar:1.16.0 +com.aayushatharva.brotli4j:native-linux-x86_64::jar:1.16.0 +com.aayushatharva.brotli4j:service::jar:1.16.0 +com.cronutils:cron-utils::jar:9.2.1 +com.fasterxml.jackson.core:jackson-annotations::jar:2.17.2 +com.fasterxml.jackson.core:jackson-core::jar:2.17.2 +com.fasterxml.jackson.core:jackson-databind::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-toml::jar:2.17.2 +com.fasterxml.jackson.dataformat:jackson-dataformat-yaml::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jdk8::jar:2.17.2 +com.fasterxml.jackson.datatype:jackson-datatype-jsr310::jar:2.17.2 +com.fasterxml.jackson.module:jackson-module-parameter-names::jar:2.17.2 +com.github.ben-manes.caffeine:caffeine::jar:3.1.5 +com.github.filosganga:geogson-core::jar:1.2.21 +com.github.filosganga:geogson-jts::jar:1.2.21 +com.github.luben:zstd-jni::jar:1.5.6-3 +com.google.code.gson:gson::jar:2.11.0 +com.google.errorprone:error_prone_annotations::jar:2.30.0 +com.google.guava:failureaccess::jar:1.0.1 +com.google.guava:guava::jar:33.2.1-jre +com.google.j2objc:j2objc-annotations::jar:2.8 +com.ongres.scram:client::jar:2.1 +com.ongres.scram:common::jar:2.1 +com.ongres.stringprep:saslprep::jar:1.1 +com.ongres.stringprep:stringprep::jar:1.1 +com.vividsolutions:jts-core::jar:1.14.0 +commons-codec:commons-codec::jar:1.17.1 +commons-io:commons-io::jar:2.16.1 
+eu.neclab.ngsildbroker:commons::jar:5.0.5-SNAPSHOT +io.agroal:agroal-api::jar:2.5 +io.agroal:agroal-narayana::jar:2.5 +io.agroal:agroal-pool::jar:2.5 +io.github.crac:org-crac::jar:0.1.3 +io.netty:netty-buffer::jar:4.1.111.Final +io.netty:netty-codec-dns::jar:4.1.111.Final +io.netty:netty-codec-haproxy::jar:4.1.111.Final +io.netty:netty-codec-http2::jar:4.1.111.Final +io.netty:netty-codec-http::jar:4.1.111.Final +io.netty:netty-codec-mqtt::jar:4.1.111.Final +io.netty:netty-codec-socks::jar:4.1.111.Final +io.netty:netty-codec::jar:4.1.111.Final +io.netty:netty-common::jar:4.1.111.Final +io.netty:netty-handler-proxy::jar:4.1.111.Final +io.netty:netty-handler::jar:4.1.111.Final +io.netty:netty-resolver-dns::jar:4.1.111.Final +io.netty:netty-resolver::jar:4.1.111.Final +io.netty:netty-transport-classes-epoll::jar:4.1.111.Final +io.netty:netty-transport-classes-kqueue::jar:4.1.111.Final +io.netty:netty-transport-native-epoll:linux-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-kqueue:osx-x86_64:jar:4.1.111.Final +io.netty:netty-transport-native-unix-common::jar:4.1.111.Final +io.netty:netty-transport::jar:4.1.111.Final +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api-incubator::jar:2.5.0-alpha +io.opentelemetry.instrumentation:opentelemetry-instrumentation-api::jar:2.5.0 +io.opentelemetry.semconv:opentelemetry-semconv::jar:1.26.0-alpha +io.opentelemetry:opentelemetry-api-incubator::jar:1.39.0-alpha +io.opentelemetry:opentelemetry-api::jar:1.39.0 +io.opentelemetry:opentelemetry-context::jar:1.39.0 +io.quarkiverse.loggingmanager:quarkus-logging-manager::jar:3.1.2 +io.quarkiverse.systemd.notify:quarkus-systemd-notify::jar:1.0.2 +io.quarkus.arc:arc::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common-types::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-common::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-jackson::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive-vertx::jar:3.14.1 +io.quarkus.resteasy.reactive:resteasy-reactive::jar:3.14.1 +io.quarkus.security:quarkus-security::jar:2.1.0 +io.quarkus.vertx.utils:quarkus-vertx-utils::jar:3.14.1 +io.quarkus:quarkus-agroal::jar:3.14.1 +io.quarkus:quarkus-arc::jar:3.14.1 +io.quarkus:quarkus-bootstrap-runner::jar:3.14.1 +io.quarkus:quarkus-cache-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-cache::jar:3.14.1 +io.quarkus:quarkus-caffeine::jar:3.14.1 +io.quarkus:quarkus-classloader-commons::jar:3.14.1 +io.quarkus:quarkus-container-image-docker-common::jar:3.14.1 +io.quarkus:quarkus-container-image-docker::jar:3.14.1 +io.quarkus:quarkus-container-image::jar:3.14.1 +io.quarkus:quarkus-core::jar:3.14.1 +io.quarkus:quarkus-credentials::jar:3.14.1 +io.quarkus:quarkus-datasource-common::jar:3.14.1 +io.quarkus:quarkus-datasource::jar:3.14.1 +io.quarkus:quarkus-development-mode-spi::jar:3.14.1 +io.quarkus:quarkus-flyway-postgresql::jar:3.14.1 +io.quarkus:quarkus-flyway::jar:3.14.1 +io.quarkus:quarkus-fs-util::jar:0.0.10 +io.quarkus:quarkus-info-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-info::jar:3.14.1 +io.quarkus:quarkus-jackson::jar:3.14.1 +io.quarkus:quarkus-jdbc-postgresql::jar:3.14.1 +io.quarkus:quarkus-jsonp::jar:3.14.1 +io.quarkus:quarkus-kafka-client::jar:3.14.1 +io.quarkus:quarkus-messaging-kafka::jar:3.14.1 +io.quarkus:quarkus-messaging-kotlin::jar:3.14.1 +io.quarkus:quarkus-messaging::jar:3.14.1 +io.quarkus:quarkus-mutiny-reactive-streams-operators::jar:3.14.1 +io.quarkus:quarkus-mutiny::jar:3.14.1 +io.quarkus:quarkus-narayana-jta::jar:3.14.1 
+io.quarkus:quarkus-netty::jar:3.14.1 +io.quarkus:quarkus-reactive-datasource::jar:3.14.1 +io.quarkus:quarkus-reactive-pg-client::jar:3.14.1 +io.quarkus:quarkus-rest-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson-common::jar:3.14.1 +io.quarkus:quarkus-rest-jackson::jar:3.14.1 +io.quarkus:quarkus-rest::jar:3.14.1 +io.quarkus:quarkus-scheduler-api::jar:3.14.1 +io.quarkus:quarkus-scheduler-common::jar:3.14.1 +io.quarkus:quarkus-scheduler-kotlin::jar:3.14.1 +io.quarkus:quarkus-scheduler-spi::jar:3.14.1 +io.quarkus:quarkus-scheduler::jar:3.14.1 +io.quarkus:quarkus-security-runtime-spi::jar:3.14.1 +io.quarkus:quarkus-smallrye-context-propagation::jar:3.14.1 +io.quarkus:quarkus-smallrye-health::jar:3.14.1 +io.quarkus:quarkus-smallrye-metrics::jar:3.14.1 +io.quarkus:quarkus-smallrye-openapi::jar:3.14.1 +io.quarkus:quarkus-swagger-ui::jar:3.14.1 +io.quarkus:quarkus-tls-registry::jar:3.14.1 +io.quarkus:quarkus-transaction-annotations::jar:3.14.1 +io.quarkus:quarkus-vertx-http::jar:3.14.1 +io.quarkus:quarkus-vertx-latebound-mdc-provider::jar:3.14.1 +io.quarkus:quarkus-vertx::jar:3.14.1 +io.quarkus:quarkus-virtual-threads::jar:3.14.1 +io.smallrye.common:smallrye-common-annotation::jar:2.5.0 +io.smallrye.common:smallrye-common-classloader::jar:2.5.0 +io.smallrye.common:smallrye-common-constraint::jar:2.5.0 +io.smallrye.common:smallrye-common-cpu::jar:2.5.0 +io.smallrye.common:smallrye-common-expression::jar:2.5.0 +io.smallrye.common:smallrye-common-function::jar:2.5.0 +io.smallrye.common:smallrye-common-io::jar:2.5.0 +io.smallrye.common:smallrye-common-net::jar:2.5.0 +io.smallrye.common:smallrye-common-os::jar:2.5.0 +io.smallrye.common:smallrye-common-ref::jar:2.5.0 +io.smallrye.common:smallrye-common-vertx-context::jar:2.5.0 +io.smallrye.config:smallrye-config-common::jar:3.9.1 +io.smallrye.config:smallrye-config-core::jar:3.9.1 +io.smallrye.config:smallrye-config::jar:3.9.1 +io.smallrye.reactive:mutiny-reactive-streams-operators::jar:2.6.2 +io.smallrye.reactive:mutiny-smallrye-context-propagation::jar:2.6.2 +io.smallrye.reactive:mutiny-zero-flow-adapters::jar:1.1.0 +io.smallrye.reactive:mutiny-zero::jar:1.1.0 +io.smallrye.reactive:mutiny::jar:2.6.2 +io.smallrye.reactive:smallrye-mutiny-vertx-auth-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-bridge-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-core::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-mqtt::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-pg-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-runtime::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-sql-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-uri-template::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-client::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web-common::jar:3.14.0 +io.smallrye.reactive:smallrye-mutiny-vertx-web::jar:3.14.0 +io.smallrye.reactive:smallrye-reactive-converter-api::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-converter-mutiny::jar:3.0.1 +io.smallrye.reactive:smallrye-reactive-messaging-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-camel::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-health::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-in-memory::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka-api::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-kafka::jar:4.24.0 +io.smallrye.reactive:smallrye-reactive-messaging-otel::jar:4.24.0 
+io.smallrye.reactive:smallrye-reactive-messaging-provider::jar:4.24.0 +io.smallrye.reactive:vertx-mutiny-generator::jar:3.14.0 +io.smallrye:jandex::jar:3.2.2 +io.smallrye:smallrye-context-propagation-api::jar:2.1.2 +io.smallrye:smallrye-context-propagation-jta::jar:2.1.2 +io.smallrye:smallrye-context-propagation-storage::jar:2.1.2 +io.smallrye:smallrye-context-propagation::jar:2.1.2 +io.smallrye:smallrye-fault-tolerance-vertx::jar:6.4.0 +io.smallrye:smallrye-health-api::jar:4.1.0 +io.smallrye:smallrye-health-provided-checks::jar:4.1.0 +io.smallrye:smallrye-health::jar:4.1.0 +io.smallrye:smallrye-metrics::jar:4.0.0 +io.smallrye:smallrye-open-api-core::jar:3.10.0 +io.vertx:vertx-auth-common::jar:4.5.9 +io.vertx:vertx-bridge-common::jar:4.5.9 +io.vertx:vertx-codegen::jar:4.5.9 +io.vertx:vertx-core::jar:4.5.9 +io.vertx:vertx-mqtt::jar:4.5.9 +io.vertx:vertx-pg-client::jar:4.5.9 +io.vertx:vertx-sql-client::jar:4.5.9 +io.vertx:vertx-uri-template::jar:4.5.9 +io.vertx:vertx-web-client::jar:4.5.9 +io.vertx:vertx-web-common::jar:4.5.9 +io.vertx:vertx-web::jar:4.5.9 +jakarta.activation:jakarta.activation-api::jar:2.1.3 +jakarta.annotation:jakarta.annotation-api::jar:3.0.0 +jakarta.el:jakarta.el-api::jar:5.0.1 +jakarta.enterprise:jakarta.enterprise.cdi-api::jar:4.1.0 +jakarta.enterprise:jakarta.enterprise.lang-model::jar:4.1.0 +jakarta.inject:jakarta.inject-api::jar:2.0.1 +jakarta.interceptor:jakarta.interceptor-api::jar:2.2.0 +jakarta.json:jakarta.json-api::jar:2.1.3 +jakarta.resource:jakarta.resource-api::jar:2.1.0 +jakarta.transaction:jakarta.transaction-api::jar:2.0.1 +jakarta.ws.rs:jakarta.ws.rs-api::jar:3.1.0 +jakarta.xml.bind:jakarta.xml.bind-api::jar:4.0.2 +org.apache.camel.quarkus:camel-quarkus-core::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-reactive-streams::jar:3.14.0 +org.apache.camel.quarkus:camel-quarkus-smallrye-reactive-messaging::jar:3.14.0 +org.apache.camel:camel-api::jar:4.7.0 +org.apache.camel:camel-base-engine::jar:4.7.0 +org.apache.camel:camel-base::jar:4.7.0 +org.apache.camel:camel-componentdsl::jar:4.7.0 +org.apache.camel:camel-core-catalog::jar:4.7.0 +org.apache.camel:camel-core-engine::jar:4.7.0 +org.apache.camel:camel-core-languages::jar:4.7.0 +org.apache.camel:camel-core-model::jar:4.7.0 +org.apache.camel:camel-core-processor::jar:4.7.0 +org.apache.camel:camel-core-reifier::jar:4.7.0 +org.apache.camel:camel-endpointdsl::jar:4.7.0 +org.apache.camel:camel-main::jar:4.7.0 +org.apache.camel:camel-management-api::jar:4.7.0 +org.apache.camel:camel-microprofile-config::jar:4.7.0 +org.apache.camel:camel-reactive-streams::jar:4.7.0 +org.apache.camel:camel-support::jar:4.7.0 +org.apache.camel:camel-tooling-model::jar:4.7.0 +org.apache.camel:camel-util-json::jar:4.7.0 +org.apache.camel:camel-util::jar:4.7.0 +org.apache.camel:camel-xml-jaxp-util::jar:4.7.0 +org.apache.commons:commons-lang3::jar:3.14.0 +org.apache.httpcomponents:httpclient-cache::jar:4.5.14 +org.apache.httpcomponents:httpclient::jar:4.5.14 +org.apache.httpcomponents:httpcore::jar:4.4.16 +org.apache.kafka:kafka-clients::jar:3.7.1 +org.checkerframework:checker-qual::jar:3.46.0 +org.eclipse.microprofile.config:microprofile-config-api::jar:3.1 +org.eclipse.microprofile.context-propagation:microprofile-context-propagation-api::jar:1.3 +org.eclipse.microprofile.health:microprofile-health-api::jar:4.0.1 +org.eclipse.microprofile.metrics:microprofile-metrics-api::jar:4.0.1 +org.eclipse.microprofile.openapi:microprofile-openapi-api::jar:3.1.1 
+org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-api::jar:3.0 +org.eclipse.microprofile.reactive-streams-operators:microprofile-reactive-streams-operators-core::jar:3.0 +org.eclipse.parsson:parsson::jar:1.1.7 +org.flywaydb:flyway-core::jar:10.17.1 +org.flywaydb:flyway-database-postgresql::jar:10.17.1 +org.glassfish.expressly:expressly::jar:5.0.0 +org.jboss.invocation:jboss-invocation::jar:2.0.0.Final +org.jboss.logging:commons-logging-jboss-logging::jar:1.0.0.Final +org.jboss.logging:jboss-logging-annotations::jar:3.0.1.Final +org.jboss.logging:jboss-logging::jar:3.6.0.Final +org.jboss.logmanager:jboss-logmanager::jar:3.0.6.Final +org.jboss.narayana.jta:narayana-jta::jar:7.0.2.Final +org.jboss.narayana.jts:narayana-jts-integration::jar:7.0.2.Final +org.jboss.slf4j:slf4j-jboss-logmanager::jar:2.0.0.Final +org.jboss.threads:jboss-threads::jar:3.6.1.Final +org.jboss:jboss-transaction-spi::jar:8.0.0.Final +org.jctools:jctools-core::jar:4.0.5 +org.locationtech.jts:jts-core::jar:1.18.2 +org.locationtech.spatial4j:spatial4j::jar:0.8 +org.lz4:lz4-java::jar:1.8.0 +org.noggit:noggit::jar:0.8 +org.postgresql:postgresql::jar:42.7.4 +org.reactivestreams:reactive-streams::jar:1.0.4 +org.slf4j:slf4j-api::jar:2.0.6 +org.wildfly.common:wildfly-common::jar:1.7.0.Final +org.xerial.snappy:snappy-java::jar:1.1.10.5 +org.yaml:snakeyaml::jar:2.2 diff --git a/scorpio-broker/at-context-server/target/quarkus-app/quarkus-run.jar b/scorpio-broker/at-context-server/target/quarkus-app/quarkus-run.jar new file mode 100644 index 0000000000000000000000000000000000000000..e27491f527d35e9f52c432efe7739471102b0c33 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/quarkus-run.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/quarkus/generated-bytecode.jar b/scorpio-broker/at-context-server/target/quarkus-app/quarkus/generated-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..ca300dc7bd73c2527f04c391db33ec9f3e9da5b2 Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/quarkus/generated-bytecode.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/quarkus/quarkus-application.dat b/scorpio-broker/at-context-server/target/quarkus-app/quarkus/quarkus-application.dat new file mode 100644 index 0000000000000000000000000000000000000000..cf0b7721c3dbd5c0dd8a40418e97c939ab9649ea Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/quarkus/quarkus-application.dat differ diff --git a/scorpio-broker/at-context-server/target/quarkus-app/quarkus/transformed-bytecode.jar b/scorpio-broker/at-context-server/target/quarkus-app/quarkus/transformed-bytecode.jar new file mode 100644 index 0000000000000000000000000000000000000000..a1b6290f63604219b0ea9a1d95157abe37f069ad Binary files /dev/null and b/scorpio-broker/at-context-server/target/quarkus-app/quarkus/transformed-bytecode.jar differ diff --git a/scorpio-broker/at-context-server/target/quarkus-artifact.properties b/scorpio-broker/at-context-server/target/quarkus-artifact.properties new file mode 100644 index 0000000000000000000000000000000000000000..68a593e367edb904b6d1decf37b14e074195074c --- /dev/null +++ b/scorpio-broker/at-context-server/target/quarkus-artifact.properties @@ -0,0 +1,4 @@ +# Generated by Quarkus - Do not edit manually +metadata.container-image=scorpiobroker/at-context-server\:5.0.5-SNAPSHOT +metadata.pull-required=false +type=jar-container diff --git 
a/scorpio-broker/docker-compose-aaio-dev.yml b/scorpio-broker/docker-compose-aaio-dev.yml new file mode 100644 index 0000000000000000000000000000000000000000..9bb4f243f670e22bc84a9655689c057f25b3d706 --- /dev/null +++ b/scorpio-broker/docker-compose-aaio-dev.yml @@ -0,0 +1,44 @@ +version: '3' + +services: + zookeeper: + image: zookeeper + ports: + - "2181" + logging: + driver: none + kafka: + image: wurstmeister/kafka + hostname: kafka + ports: + - "9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_PORT: 9092 + KAFKA_LOG_RETENTION_MS: 10000 + KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 5000 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + depends_on: + - zookeeper + logging: + driver: none + + postgres: + image: mdillon/postgis + ports: + - "5432" + environment: + POSTGRES_USER: ngb + POSTGRES_PASSWORD: ngb + POSTGRES_DB: ngb + logging: + driver: none + scorpio: + image: scorpiobroker/scorpio:scorpio-aaio_0.9.2-SNAPSHOT + ports: + - "9090:9090" + depends_on: + - kafka + - postgres diff --git a/scorpio-broker/docker-compose-aaio.yml b/scorpio-broker/docker-compose-aaio.yml new file mode 100644 index 0000000000000000000000000000000000000000..a87cb8e584db02815c4b03585a062af82c8d557d --- /dev/null +++ b/scorpio-broker/docker-compose-aaio.yml @@ -0,0 +1,35 @@ +version: '3' + +services: + zookeeper: + image: zookeeper + ports: + - "2181" + kafka: + image: wurstmeister/kafka + hostname: kafka + ports: + - "9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_PORT: 9092 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + depends_on: + - zookeeper + postgres: + image: mdillon/postgis + ports: + - "5432" + environment: + POSTGRES_USER: ngb + POSTGRES_PASSWORD: ngb + POSTGRES_DB: ngb + scorpio: + image: scorpiobroker/scorpio:scorpio-aaio_latest + ports: + - "9090:9090" + depends_on: + - kafka + - postgres diff --git a/scorpio-broker/docker-compose-dist-dev.yml b/scorpio-broker/docker-compose-dist-dev.yml new file mode 100644 index 0000000000000000000000000000000000000000..ac37cceeffdf5a6734fdc9c73113b33d091f4fd4 --- /dev/null +++ b/scorpio-broker/docker-compose-dist-dev.yml @@ -0,0 +1,98 @@ +version: '3' + +services: + zookeeper: + image: zookeeper + ports: + - "2181" + kafka: + image: wurstmeister/kafka + hostname: kafka + ports: + - "9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_PORT: 9092 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + depends_on: + - zookeeper + postgres: + image: mdillon/postgis + ports: + - "5432" + environment: + POSTGRES_USER: ngb + POSTGRES_PASSWORD: ngb + POSTGRES_DB: ngb + atctxsrv: + image: scorpiobroker/scorpio:AtContextServer_0.9.2-SNAPSHOT + ports: + - "27015" + depends_on: + - kafka + - eureka + cfgsrv: + image: scorpiobroker/scorpio:config-server_0.9.2-SNAPSHOT + ports: + - "8888" + depends_on: + - eureka + emgr: + image: scorpiobroker/scorpio:EntityManager_0.9.2-SNAPSHOT + ports: + - "1025" + depends_on: + - kafka + - eureka + eureka: + image: scorpiobroker/scorpio:eureka-server_0.9.2-SNAPSHOT + ports: + - "8761" + gateway: + image: scorpiobroker/scorpio:gateway_0.9.2-SNAPSHOT + ports: + - "9090:9090" + depends_on: + - eureka + histmgr: + image: scorpiobroker/scorpio:HistoryManager_0.9.2-SNAPSHOT + ports: + - "1040" + depends_on: + - kafka + - gateway + - eureka + qrymgr: + image: scorpiobroker/scorpio:QueryManager_0.9.2-SNAPSHOT + ports: + - 
"1026" + depends_on: + - kafka + - postgres + - eureka + regmgr: + image: scorpiobroker/scorpio:RegistryManager_0.9.2-SNAPSHOT + ports: + - "1030" + depends_on: + - kafka + - postgres + - gateway + - eureka + stomgr: + image: scorpiobroker/scorpio:StorageManager_0.9.2-SNAPSHOT + ports: + - "1029" + depends_on: + - kafka + - postgres + - eureka + sbsmgr: + image: scorpiobroker/scorpio:SubscriptionManager_0.9.2-SNAPSHOT + ports: + - "2025" + depends_on: + - kafka + - eureka diff --git a/scorpio-broker/docker-compose-dist.yml b/scorpio-broker/docker-compose-dist.yml new file mode 100644 index 0000000000000000000000000000000000000000..1d93486ef4f72de987d1a3bbc1848b6a9538ee95 --- /dev/null +++ b/scorpio-broker/docker-compose-dist.yml @@ -0,0 +1,101 @@ +version: '3' + +services: + zookeeper: + image: zookeeper + ports: + - "2181" + kafka: + image: wurstmeister/kafka + hostname: kafka + ports: + - "9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_PORT: 9092 + KAFKA_LOG_RETENTION_MS: 10000 + KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 5000 + depends_on: + - zookeeper + postgres: + image: mdillon/postgis + ports: + - "5432" + environment: + POSTGRES_USER: ngb + POSTGRES_PASSWORD: ngb + POSTGRES_DB: ngb + atctxsrv: + image: scorpiobroker/scorpio:AtContextServer_1.0.0-SNAPSHOT + ports: + - "27015" + depends_on: + - kafka + - eureka + cfgsrv: + image: scorpiobroker/scorpio:config-server_1.0.0-SNAPSHOT + ports: + - "8888" + depends_on: + - eureka + emgr: + image: scorpiobroker/scorpio:EntityManager_1.0.0-SNAPSHOT + ports: + - "1025" + depends_on: + - kafka + - eureka + eureka: + image: scorpiobroker/scorpio:eureka-server_1.0.0-SNAPSHOT + ports: + - "8761" + environment: + - JAVA_OPTS=--add-opens java.base/java.util=ALL-UNNAMED + + gateway: + image: scorpiobroker/scorpio:gateway_1.0.0-SNAPSHOT + ports: + - "9090:9090" + depends_on: + - eureka + histmgr: + image: scorpiobroker/scorpio:HistoryManager_1.0.0-SNAPSHOT + ports: + - "1040" + depends_on: + - kafka + - gateway + - eureka + qrymgr: + image: scorpiobroker/scorpio:QueryManager_1.0.0-SNAPSHOT + ports: + - "1026" + depends_on: + - kafka + - postgres + - eureka + regmgr: + image: scorpiobroker/scorpio:RegistryManager_1.0.0-SNAPSHOT + ports: + - "1030" + depends_on: + - kafka + - postgres + - gateway + - eureka + stomgr: + image: scorpiobroker/scorpio:StorageManager_1.0.0-SNAPSHOT + ports: + - "1029" + depends_on: + - kafka + - postgres + - eureka + sbsmgr: + image: scorpiobroker/scorpio:SubscriptionManager_1.0.0-SNAPSHOT + ports: + - "2025" + depends_on: + - kafka + - eureka diff --git a/scorpio-broker/dockerfile4maven b/scorpio-broker/dockerfile4maven new file mode 100644 index 0000000000000000000000000000000000000000..1b98ed8d511b4a6293d9c4fac5656818b6d71073 --- /dev/null +++ b/scorpio-broker/dockerfile4maven @@ -0,0 +1,46 @@ +FROM openjdk:8-jre + +WORKDIR /usr/src/scorpio + + + +ARG BUILD_DIR_SCS +ARG BUILD_DIR_SES +ARG BUILD_DIR_SGW +ARG BUILD_DIR_SCR + +ARG JAR_FILE_BUILD_SCS +ARG JAR_FILE_BUILD_SES +ARG JAR_FILE_BUILD_SGW +ARG JAR_FILE_BUILD_SCR + + +ARG JAR_FILE_RUN_SCS +ARG JAR_FILE_RUN_SES +ARG JAR_FILE_RUN_SGW +ARG JAR_FILE_RUN_SCR + +COPY ${BUILD_DIR_SCS}/target/${JAR_FILE_BUILD_SCS} ./scs/${JAR_FILE_RUN_SCS} +COPY ${BUILD_DIR_SES}/target/${JAR_FILE_BUILD_SES} ./ses/${JAR_FILE_RUN_SES} +COPY ${BUILD_DIR_SGW}/target/${JAR_FILE_BUILD_SGW} ./sgw/${JAR_FILE_RUN_SGW} +COPY ${BUILD_DIR_SCR}/target/${JAR_FILE_BUILD_SCR} ./scr/${JAR_FILE_RUN_SCR} + + + +COPY 
${BUILD_DIR_SES}/src/main/resources/application-aaio.yml ./ses/config/application.yml +COPY ${BUILD_DIR_SCS}/src/main/resources/application-aaio.yml ./scs/config/application.yml +COPY ${BUILD_DIR_SGW}/src/main/resources/application-aaio.yml ./sgw/config/application.yml +#COPY ${BUILD_DIR_SCR}/src/main/resources/application-aaio.yml ./scr/config/application.yml + +ENV sesdir ses +ENV sesjar ${JAR_FILE_RUN_SES} +ENV scsdir scs +ENV scsjar ${JAR_FILE_RUN_SCS} +ENV sgwdir sgw +ENV sgwjar ${JAR_FILE_RUN_SGW} +ENV scrdir scr +ENV scrjar ${JAR_FILE_RUN_SCR} +ENV spring_args "" +COPY run.sh ./ + +CMD bash ./run.sh ${spring_args} \ No newline at end of file diff --git a/scorpio-broker/docs/en/requirements.txt b/scorpio-broker/docs/en/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..23a84795430b03ec30d0f4debb8d89a9a33021bb --- /dev/null +++ b/scorpio-broker/docs/en/requirements.txt @@ -0,0 +1,3 @@ +sphinx +sphinx_rtd_theme +sphinx_tabs \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/API_walkthrough.rst b/scorpio-broker/docs/en/source/API_walkthrough.rst new file mode 100644 index 0000000000000000000000000000000000000000..7c711e9eec9c92f2f8cab1857cbad7b53affa37b --- /dev/null +++ b/scorpio-broker/docs/en/source/API_walkthrough.rst @@ -0,0 +1,1367 @@ +************ +Introduction +************ + +This walkthrough adopts a practical approach that we hope will help our readers to get familiar with NGSI-LD in general and the Scorpio Broker in particular - and have some fun in the process :). + +The walkthrough is based on the NGSI-LD Specification, that can be found in here [https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.02.02_60/gs_CIM009v010202p.pdf]. --> will become gs_CIM009v010301p.pdf soon ... +You should also have a look at the NGSI-LD implementation notes. --> once they are available +To get familiar with NGSI-LD, you may also have a look at the NGSI-LD Primer [https://www.etsi.org/deliver/etsi_gr/CIM/001_099/008/01.01.01_60/gr_CIM008v010101p.pdf] that is targeted at developers. + +The main section is about context management. It describes the basic context broker functionality for context management (information about entities, such as the temperature of a car). Context source management (information not about the entities themselves, but about the sources that can provide the information in a distributed system setup) is also described as part of this document. + +It is recommended to get familiar with the theoretical concepts on which the NGSI-LD model is based before starting. E.g. entities, properties, relationships etc. Have a look at the FIWARE documentation about this, e.g. this public presentation. [... find suitable presentation] + + +Starting the Scorpio Broker for the tutorials +############################################# + +In order to start the broker we recommend to use docker-compose. Get the docker-compose file from the github repo of Scorpio. +:: + + curl https://raw.githubusercontent.com/ScorpioBroker/ScorpioBroker/development/docker-compose-aaio.yml + +and start the container with +:: + + sudo docker-compose -f docker-compose-aaio.yml up + +You can also start the broker without docker. For further instructions please refer to the readme https://github.com/ScorpioBroker/ScorpioBroker/blob/development/README.md + + +Issuing commands to the broker +############################## + +To issue requests to the broker, you can use the curl command line tool. 
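A quick way to check that the broker started above is reachable is a single GET against the entities endpoint. This is only a sanity-check sketch, assuming the default port 9090 used throughout this walkthrough and the Room entity type introduced below; on a freshly installed broker the response is simply an empty JSON array.
::

   # List all entities of type Room; a broker without data answers with []
   curl localhost:9090/ngsi-ld/v1/entities?type=Room -s -S -H 'Accept: application/json'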
curl is chosen because it is almost ubiquitous in any GNU/Linux system and simplifies including examples in this document that can easily be copied and pasted. Of course, it is not mandatory to use it, you can use any REST client tool instead (e.g. RESTClient). Indeed, in a real case, you will probably interact with the Scorpio Broker using a programming language library implementing the REST client part of your application.
+
+The basic patterns for all the curl examples in this document are the following:
+
+For POST:
+curl localhost:9090/ngsi-ld/v1/<url> -s -S [headers] -d @- <<EOF
+[payload]
+EOF
+For PUT:
+curl localhost:9090/ngsi-ld/v1/<url> -s -S [headers] -X PUT -d @- <<EOF
+[payload]
+EOF
+For PATCH:
+curl localhost:9090/ngsi-ld/v1/<url> -s -S [headers] -X PATCH -d @- <<EOF
+[payload]
+EOF
+For GET:
+curl localhost:9090/ngsi-ld/v1/<url> -s -S [headers]
+For DELETE:
+curl localhost:9090/ngsi-ld/v1/<url> -s -S [headers] -X DELETE
+Regarding [headers] you have to include the following ones:
+
+Accept header to specify the payload format in which you want to receive the response. You should explicitly specify JSON or JSON-LD.
+curl ... -H 'Accept: application/json' ... or curl ... -H 'Accept: application/ld+json' depending on whether you want to
+receive the JSON-LD @context in a link header or in the body of the response (JSON-LD and the use of @context is described in the
+following section).
+
+If using payload in the request (i.e. POST, PUT or PATCH), you have to supply the Content-Type HTTP header to specify the format (JSON or JSON-LD).
+curl ... -H 'Content-Type: application/json' ... or -H 'Content-Type: application/ld+json'
+
+In case the JSON-LD @context is not provided as part of the request body, it has to be provided as a link header, e.g.
+curl ... -H 'Link: <https://uri.etsi.org/ngsi-ld/primer/store-context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' where the @context has to be retrievable from the first URI, i.e. in this example: https://uri.etsi.org/ngsi-ld/primer/store-context.jsonld
+
+Some additional remarks:
+
+Most of the time we are using multi-line shell commands to provide the input to curl, using EOF to mark the beginning and the end of the multi-line block (here-documents). In some cases (GET and DELETE) we omit -d @- as no payload is used.
+
+In the examples, it is assumed that the broker is listening on port 9090. Adjust this in the curl command line if you are using a different port.
+
+In order to pretty-print JSON in responses, you can use Python with json.tool (examples along with the tutorial are using this style):
+
+(curl ... | python -mjson.tool) <<EOF
+[payload]
+EOF
+
+Context Management
+##################
+
+To show the use of @context, most examples in this tutorial will be done as application/ld+json having the @context entries in the body of the payload.
+At the end of this section, you will have the basic knowledge to create applications (both context producers and consumers) using the Scorpio Broker with context management operations.
+
+***************
+Entity creation
+***************
+
+Assuming a fresh start we have an empty Scorpio Broker.
+First, we are going to create house2:smartrooms:room1. Let's assume that at entity creation time, temperature is 23 °C and it is part of smartcity:houses:house2.
+::
+
+  curl localhost:9090/ngsi-ld/v1/entities -s -S -H 'Content-Type: application/ld+json' -d @- <<EOF
+  {
+    "id": "house2:smartrooms:room1",
+    "type": "Room",
+    "temperature": {
+      "value": 23,
+      "unitCode": "CEL",
+      "type": "Property",
+      "providedBy": {
+        "type": "Relationship",
+        "object": "smartbuilding:house2:sensor0815"
+      }
+    },
+    "isPartOf": {
+      "type": "Relationship",
+      "object": "smartcity:houses:house2"
+    },
+    "@context": [ "https://pastebin.com/raw/Mgxv2ykn" ]
+  }
+  EOF
+
+Retrieving smartcity:houses:house2, with the custom @context supplied in a link header, works like this.
+::
+
+  curl localhost:9090/ngsi-ld/v1/entities/smartcity%3Ahouses%3Ahouse2 -s -S -H 'Accept: application/ld+json' -H 'Link: <https://pastebin.com/raw/Mgxv2ykn>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"'
+
+The reply now looks like this.
+:: + + { + "id": "smartcity:houses:house2", + "type": "House", + "hasRoom": [{ + "type": "Relationship", + "object": "house2:smartrooms:room1", + "datasetId": "somethingunique1" + }, + { + "type": "Relationship", + "object": "house2:smartrooms:room2", + "datasetId": "somethingunique2" + }], + "location": { + "type": "GeoProperty", + "value": { + "type": "Polygon", + "coordinates": [[[-8.5, 41.2], [-8.5000001, 41.2], [-8.5000001, 41.2000001], [-8.5, 41.2000001], [-8.5, 41.2]]] + } + }, + "entrance": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [-8.50000005, 41.2] + } + }, + "@context": [ "https://pastebin.com/raw/Mgxv2ykn" ] + } + +Since we provide the core context in our own @context it is not added to the result. +From here on we will use the custom @context so we can use the short names in all of our requests. + +You can also request an entity with a single specified attribute, using the attrs parameter. For example, to get only the location: +:: + + curl localhost:9090/ngsi-ld/v1/entities/smartcity%3Ahouses%3Ahouse2/?attrs=location -s -S -H 'Accept: application/ld+json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +Response: +:: + + { + "id": "smartcity:houses:house2", + "type": "House", + "location": { + "type": "GeoProperty", + "value": { + "type": "Polygon", + "coordinates": [[[-8.5, 41.2], [-8.5000001, 41.2], [-8.5000001, 41.2000001], [-8.5, 41.2000001], [-8.5, 41.2]]] + } + }, + "@context": [ "https://pastebin.com/raw/Mgxv2ykn" ] + } + +Query +##### + +The second way to retrieve information is the NGSI-LD query. +For this example we first add a new Room which belongs to another house. +:: + + curl localhost:9090/ngsi-ld/v1/entities -s -S -H 'Content-Type: application/ld+json' -d @- <; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +Note that this request has the accept header application/json, i.e. the link to the @context is returned in a link header. +The result is +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": { + "value": 23, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor0815" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property" + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor4711" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + }, + { + "id": "house99:smartrooms:room42", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house99:sensor36" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house99" + } + } + ] + +Filtering +######### + +NGSI-LD provides a lot of ways to filter Entities from query results (and subscription notifications respectively). +Since we are only interested in our smartcity:houses:house2, we are using the 'q' filter on the Relatioship isPartOf. 
+(URL encoding "smartcity:houses:house2" becomes %22smartcity%3Ahouses%3Ahouse2%22) +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room\&q=isPartOf==%22smartcity%3Ahouses%3Ahouse2%22 -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +The results now looks like this. +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": { + "value": 23, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor0815" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property" + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor4711" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + } + ] + +Now an alternative way to get the same result would be using the idPattern parameter, which allows you to use regular expressions. This is possible in this case since we structured our IDs for the rooms. +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room\&idPattern=house2%3Asmartrooms%3Aroom.%2A -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + (house2%3Asmartrooms%3Aroom.%2A == house2:smartrooms:room.*) + +Limit the attributes +#################### + +Additionally we now want to limit the result to only give us the temperature. This is done by using the attrs parameter. Attrs takes a comma seperated list. In our case since it's only one entry it looks like this. +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room&q=isPartOf==%22smartcity%3Ahouses%3Ahouse2%22\&attrs=temperature -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": { + "value": 23, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor0815" + } + } + + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property" + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor4711" + } + } + } + ] + +KeyValues results +################# + +Now assuming we want to limit the payload of the request even more since we are really only interested in the value of temperature and don't care about any meta information. This can be done using the keyValues option. KeyValues will return a condenced version of the Entity providing only top level attribute and their respective value or object. 
+:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room\&q=isPartOf==%22smartcity%3Ahouses%3Ahouse2%22\&attrs=temperature\&options=keyValues -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +Response: +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": 23 + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": 21 + } + ] + +******************************************* +Updating an entity & appending to an entity +******************************************* + +NGSI-LD allows you to update entities (overwrite the current entry) but also to just append new attributes. +Additonally you can of course just update a specific attribute. +Taking the role of the Context Producer for the temperature for house2:smartrooms:room1 we will cover 5 scenarios. +1. Updating the entire entity to push new values. +2. Appending a new Property providing the humidity from the room. +3. Partially updating the value of the temperature. +4. Appending a new multi value entry to temperature providing the info in degree Kelvin +5. Updating the specific multi value entries for temperature and Fahrenheit. + +Update Entity +############# + +You can basically update every part of an entity with two exceptions. The type and the id are immutable. An update in NGSI-LD overwrites the existing entry. This means if you update an entity with a payload which does not contain a currently existing attribute it will be removed. +To update our room1 we will do an HTTP POST like this. +:: + + curl localhost:9090/ngsi-ld/v1/entities/house2%3Asmartrooms%3Aroom1 -s -S -H 'Content-Type: application/json' -H 'Link: https://pastebin.com/raw/Mgxv2ykn' -d @- </attrs/ +In order to update the temperature we do a POST like this +:: + + curl localhost:9090/ngsi-ld/v1/entities/house2%3Asmartrooms%3Aroom1/attrs/temperature -s -S -H 'Content-Type: application/json' -H 'Link: https://pastebin.com/raw/Mgxv2ykn' -d @- </attrs/ with the new attribute as payload. +Append in NGSI-LD by default will overwrite an existing entry. If this is not desired you can add the option parameter with noOverwrite to the URL like this /entities//attrs?options=noOverwrite. Now if we want to add an additional entry for the humidity in room1 we do an HTTP PATCH like this. +:: + + curl localhost:9090/ngsi-ld/v1/entities/house2%3Asmartrooms%3Aroom1/attrs -s -S -X PATCH -H 'Content-Type: application/json' -H 'Link: https://pastebin.com/raw/Mgxv2ykn' -d @- <:@]:[]/[[/]...] +So a subscription would generally look like this. +:: + + curl localhost:9090/ngsi-ld/v1/subscriptions -s -S -H 'Content-Type: application/ld+json' -d @- <; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +we will get back our original registration and everything that has been registered with the type Room. + +Context Registry usage for normal queries & subscriptions +######################################################### + +A context registry entry can have multiple entries which are taken into consideration when normal queries or subscriptions arrive in Scorpio. +As you can see there is an entities entry similar to the one in the subscriptions. This is the first thing to be taken into consideration. +If you register a type, Scorpio will only forward a request which is matching that type. Similarly the location is used to decide if a query with geo query part should be forwarded. 
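As a minimal sketch of this forwarding rule, the registration below announces only the entity type Room for an external context source; the registration id is an illustrative value, and the endpoint is the same one used in the larger example that follows.
::

   # Register an external context source for entities of type Room.
   # The id and the endpoint are illustrative values.
   curl localhost:9090/ngsi-ld/v1/csourceRegistrations -s -S -H 'Content-Type: application/ld+json' -d @- <<EOF
   {
     "id": "urn:ngsi-ld:ContextSourceRegistration:room-source-1",
     "type": "ContextSourceRegistration",
     "information": [
       {
         "entities": [
           {
             "type": "Room"
           }
         ]
       }
     ],
     "endpoint": "http://my.csource.org:1026",
     "@context": [ "https://pastebin.com/raw/Mgxv2ykn" ]
   }
   EOF

With such an entry in place, a query for the type Room will also be forwarded to http://my.csource.org:1026, while queries for other types are not forwarded to this source.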
While you shouldn't overdo it, the more details you provide in a registration the more efficiently your system will be able to determine to which context source a request should be forwarded to. +Below you see an example with more properties set. +:: + + { + "id": "urn:ngsi-ld:ContextSourceRegistration:csr1a3459", + "type": "ContextSourceRegistration", + "name": "NameExample", + "description": "DescriptionExample", + "information": [ + { + "entities": [ + { + "type": "Vehicle" + } + ], + "properties": [ + "brandName", + "speed" + ], + "relationships": [ + "isParked" + ] + }, + { + "entities": [ + { + "idPattern": ".*downtown$", + "type": "OffStreetParking" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026", + "location": "{ \"type\": \"Polygon\", \"coordinates\": [[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }" + } + +There are two entries in the information part. In the first you can see there are two additional entries describing the two properties and one relationship provided by that source. That means any query which asks for type Vehicle, without an attribute filter, will be forwarded to this source and if there is an attribute filter it will only be forwarded if the registered properties or relationships match. The second entry means that this source can provide Entities of type OffStreetParking, which have an Entity ID ending with "downtown". diff --git a/scorpio-broker/docs/en/source/HelloWorld.rst b/scorpio-broker/docs/en/source/HelloWorld.rst new file mode 100644 index 0000000000000000000000000000000000000000..bc552b293d90cc0da0253bdbd2009cd0d6be3959 --- /dev/null +++ b/scorpio-broker/docs/en/source/HelloWorld.rst @@ -0,0 +1,43 @@ +******************* +Hello World example +******************* + +Generally speaking you can Create entities which is like the hello world program for Scorpio Broker by sending an HTTP POST request to *http://localhost:9090/ngsi-ld/v1/entities/* with a payload like this + +.. 
diff --git a/scorpio-broker/docs/en/source/HelloWorld.rst b/scorpio-broker/docs/en/source/HelloWorld.rst new file mode 100644 index 0000000000000000000000000000000000000000..bc552b293d90cc0da0253bdbd2009cd0d6be3959 --- /dev/null +++ b/scorpio-broker/docs/en/source/HelloWorld.rst @@ -0,0 +1,43 @@ +******************* +Hello World example +******************* + +Generally speaking, creating an entity is the "hello world" program for Scorpio Broker. You can do this by sending an HTTP POST request to *http://localhost:9090/ngsi-ld/v1/entities/* with a payload like this + +.. code-block:: JSON + + curl localhost:9090/ngsi-ld/v1/entities -s -S -H 'Content-Type: application/ld+json' -d @- + { + "id": "urn:ngsi-ld:testunit:123", + "type": "AirQualityObserved", + "dateObserved": { + "type": "Property", + "value": { + "@type": "DateTime", + "@value": "2018-08-07T12:00:00Z" + } + }, + "NO2": { + "type": "Property", + "value": 22, + "unitCode": "GP", + "accuracy": { + "type": "Property", + "value": 0.95 + } + }, + "refPointOfInterest": { + "type": "Relationship", + "object": "urn:ngsi-ld:PointOfInterest:RZ:MainSquare" + }, + "@context": [ + "https://schema.lab.fiware.org/ld/context", + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" + ] + } + +In the given example the @context is in the payload, therefore you have to set the Content-Type header to application/ld+json + +To receive entities you can send an HTTP GET to + + **http://localhost:9090/ngsi-ld/v1/entities/** \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/Makefile b/scorpio-broker/docs/en/source/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..b72cceb2f90e31b51515cf78007ac23a4b6fc88d --- /dev/null +++ b/scorpio-broker/docs/en/source/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = ScorpioBroker +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/Payloads/FIWARE Linked Data Subscriptions and Registrations.postman_collection.json b/scorpio-broker/docs/en/source/Payloads/FIWARE Linked Data Subscriptions and Registrations.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..91e436da204ac85d9662d50c1b094dfd7dc2aff5 --- /dev/null +++ b/scorpio-broker/docs/en/source/Payloads/FIWARE Linked Data Subscriptions and Registrations.postman_collection.json @@ -0,0 +1,611 @@ +{ + "info": { + "_postman_id": "219b5883-cdc2-4dc2-8554-d22f6e13e8f8", + "name": "FIWARE Linked Data Subscriptions and Registrations", + "description": "This tutorial discusses the usage of subscriptions and registrations within NGSI-LD and highlights the similarities and\ndifferences between the equivalent NGSI-v2 and NGSI-LD operations. The tutorial is an analogue of the original\ncontext-provider and subscriptions tutorials but uses API calls from the **NGSI-LD** interface throughout.\n\nThe `docker-compose` files for this tutorial can be found on GitHub: \n\n![GitHub](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/icon/GitHub-Mark-32px.png) [FIWARE 604. Linked Data Subscriptions and Registrations](https://github.com/Fiware/tutorials.LD-Subscriptions-Registrations)\n\n\n# Understanding Linked Data Subscriptions and Registrations\n\n> “Do not repeat after me words that you do not understand.
Do not merely put on a mask of my ideas, for it will be an\n> illusion and you will thereby deceive yourself.”\n>\n> ― Jiddu Krishnamurti\n\nNGSI-LD Subscriptions and Registrations provide the basic mechanism to allow the components within a Smart Linked Data\nSolution to interact with each other.\n\nAs a brief reminder, within a distributed system, subscriptions inform a third party component that a change in the\ncontext data has occurred (and the component needs to take further actions), whereas registrations tell the context\nbroker that additional context information is available from another source.\n\nBoth of these operations require that the receiving component fully understands the requests it receives, and is capable\nof creating and interpreting the resultant payloads. The differences here between NGSI-v2 and NGSI-LD operations are\nsmall, but there has been a minor amendment to facilitate the incorporation of linked data concepts, and therefore the\ncontract between the various components has changed to include minor updates.\n\n## Entities within a stock management system\n\nThe relationship between our Linked Data entities is defined as shown; in addition to the existing data, the `tweets`\nattribute will be supplied by a _Context Provider_. In all other respects this model remains the same as the\n[previous tutorial](https://github.com/FIWARE/tutorials.Working-with-Linked-Data/) :\n\n![](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/entities.png)\n\n## Stock Management frontend\n\nThe simple Node.js Express application has been updated to use NGSI-LD in the previous\n[tutorial](https://github.com/FIWARE/tutorials.Working-with-Linked-Data/). We will use the monitor page to watch the\nstatus of recent requests, and two store pages to buy products.
Once the services are running these pages can be\naccessed from the following URLs:\n\n#### Event Monitor\n\nThe event monitor can be found at: `http://localhost:3000/app/monitor`\n\n![FIWARE Monitor](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/monitor.png)\n\n#### Store 001\n\nStore001 can be found at: `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001`\n\n![Store](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/store.png)\n\n#### Store 002\n\nStore002 can be found at: `http://localhost:3000/app/store/urn:ngsi-ld:Building:store002`\n\n![Store2](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/store2.png)\n", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Using Subscriptions with NGSI-LD", + "item": [ + { + "name": "Create a Subscription ( Store 1) - Low Stock", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json", + "type": "text", + "name": "Accept" + }, + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text", + "disabled": true + }, + { + "key": "Accept", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n\t\"description\": \"Notify me of low stock in Store 001\",\n\t\"type\": \"Subscription\",\n\t\"entities\": [{\"type\": \"Shelf\"}],\n\t\"watchedAttributes\": [\"numberOfItems\"],\n\t\"q\": \"numberOfItems<10;locatedIn==urn:ngsi-ld:Building:store001\",\n\t\"notification\": {\n\t\t\"attributes\": [\"numberOfItems\", \"stocks\", \"locatedIn\"],\n\t\t\"format\": \"keyValues\",\n\t\t\"endpoint\": {\n\t\t\t\"uri\": \"http://tutorial:3000/subscription/low-stock-store001\",\n\t\t\t\"accept\": \"application/json\"\n\t\t}\n\t},\n \"@context\": \"https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld\"\n}\n " + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/subscriptions/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "NGSI-LD subscriptions can be set up using the `/ngsi-ld/v1/subscriptions/` endpoint and in a similar manner to the\nNGSI-v2 `/v2/subscriptions` endpoint. The payload body is slightly different however. Firstly the linked data `@context`\nmust be present either as an attribute or in the `Link` header. If the `@context` is placed in the body the\n`Context-Type` header must state that the payload is `application/ld+json` - i.e. Linked Data plus JSON. The supplied\n`@context` will also be used when making notifications as part of the notification request.\n\nThe `type` of the NGSI-LD subscription request is always `type=Subscription`. The structure of the subscription has\nchanged. 
When setting up a subscription, there is no longer a separate `subject` section to the payload, entities to\nwatch and trigger conditions are now set at the same level as the `description` of the subscription.\n\n- `condition.attrs` has been moved up a level and renamed to `watchedAttributes`\n- `condition.expression` has been moved up a level and renamed to `q`\n\nThe `notification` section of the body states that once the conditions of the subscription have been met, a POST request\ncontaining all affected Shelf entities will be sent to the URL `http://tutorial:3000/subscription/low-stock-store001`.\nIt is now possible to amend the notification payload by requesting `notification.format=keyValues` and remove the\n`@context` from the notification body by stating `notification.endpoint.accept=application/json`. The `@context` is not\nlost, it is merely passed as a `Link` header. In summary, all of the flags within a subscription work in the same manner\nas a GET request to the context broker itself. If no flags are set, a full NGSI-LD response including the `@context` is\nreturned by default, and the payload can be reduced and amended by adding in further restriction" + }, + "response": [] + }, + { + "name": "Create a Subscription ( Store 2) - Low Stock", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "name": "Accept", + "type": "text", + "value": "application/json" + }, + { + "key": "Link", + "type": "text", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "disabled": true + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n\t\"description\": \"Notify me of low stock in Store 002\",\n\t\"type\": \"Subscription\",\n\t\"entities\": [{\"type\": \"Shelf\"}],\n\t\"watchedAttributes\": [\"numberOfItems\"],\n\t\"q\": \"numberOfItems<10;locatedIn==urn:ngsi-ld:Building:store002\",\n\t\"notification\": {\n\t\t\"attributes\": [\"numberOfItems\", \"stocks\", \"locatedIn\"],\n\t\t\"format\": \"keyValues\",\n\t\t\"endpoint\": {\n\t\t\t\"uri\": \"http://tutorial:3000/subscription/low-stock-store002\",\n\t\t\t\"accept\": \"application/json\"\n\t\t}\n\t},\n \"@context\": \"https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld\"\n}\n " + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/subscriptions/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "This second request fires notifications to a different endpoint (URL\n`http://tutorial:3000/subscription/low-stock-store002`.) The `notification.format=normalized` and\n`notification.endpoint.accept=application/ld+json` will ensure that the `@context` is passed in the body of the\nnotification request and that the payload will consist of the expanded entities." 
+ }, + "response": [] + }, + { + "name": "Read Subscription Details", + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "Content-Type", + "name": "Accept", + "type": "text", + "value": "application/json" + }, + { + "key": "Link", + "type": "text", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "disabled": true + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/subscriptions/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "subscriptions", + "" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "Subscription details can be read by making a GET request to the `/ngsi-ld/v1/subscriptions/`. All subscription CRUD\nactions continue to be mapped to the same HTTP verbs as before. Adding the `Accept: application/json` will remove the\n`@context` element from the response body.\n\n\nThe response consists of the details of the subscriptions within the system. The parameters within the `q` attribute\nhave been expanded to use the full URIs, as internally the broker consistently uses long names. The differences between\nthe payloads offered by the two subscriptions will be discussed below." + }, + "response": [] + } + ], + "description": "Goto `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001` to display and interact with the Supermarket data.", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "38a4bfe9-3203-46d5-ad26-da4685f6c6db", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "cda29993-7539-442a-93f6-13f9e04ebe44", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Retrieving Subscription Events", + "item": [], + "description": "Open two tabs on a browser. Go to the event monitor (`http://localhost:3000/app/monitor`) to see the payloads that are\nreceived when a subscription fires, and then go to store001\n(`http://localhost:3000/app/store/urn:ngsi-ld:Building:store001`) and buy beer until less than 10 items are in stock.\nThe low stock message should be displayed on screen.\n\n![low-stock](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/low-stock-warehouse.png)\n\n`low-stock-store001` is fired when the Products on the shelves within Store001 are getting low, the subscription payload\ncan be seen below:\n\n![low-stock-json](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/low-stock-monitor.png)\n\nThe data within the payload consists of key-value pairs of the attributes which were specified in the request. This is\nbecause the subscription was created using the `format=keyValues` attribute. The `@context` is not present in the\npayload body since `endpoint.accept=application/json` was set. The effect is to return a `data` array in a very similar\nformat to the `v2/subscription/` payload. In addition to the `data` array, the `subscriptionId` is included in the\nresponse, along with a `notifiedAt` element which describes when the notification was fired.\n\nNow go to store002 (`http://localhost:3000/app/store/urn:ngsi-ld:Building:store002`) and buy beer until fewer than 10\nitems are in stock. 
The low stock message is once again displayed on screen, the payload can be seen within the event\nmonitor.\n\n![low-stock-ld](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/low-stock-monitor-ld.png)\n\nThe second subscription has been set up to pass the full normalized NGSI-LD payload along with the `@context`. This has\nbeen achieved by using the using the `format=normalized` attribute within the subscription itself, as well as setting\n`endpoint.accept=application/ld+json`, so that the `@context` is also passed with each entity.", + "protocolProfileBehavior": {} + }, + { + "name": "Using Registrations with NGSI-LD", + "item": [ + { + "name": "Create a registration", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": " {\n \"type\": \"ContextSourceRegistration\",\n \"information\": [\n {\n \"entities\": [\n {\n \"type\": \"Building\",\n \"id\": \"urn:ngsi-ld:Building:store001\"\n }\n ],\n \"properties\": [\n \"tweets\"\n ]\n }\n ],\n \"endpoint\": \"http://context-provider:3000/static/tweets\"\n}" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/csourceRegistrations/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations", + "" + ] + }, + "description": "All NGSI-LD Context Provider Registration actions take place on the `/ngsi-ld/v1/csourceRegistrations/` endpoint. The\nstandard CRUD mappings apply. The `@context` must be passed either as a `Link` header or within the main body of the\nrequest.\n\nThe body of the request is similar to the NGSI-v2 equivalent with the following modifications:\n\n- The NGSI-v2 `dataProvided` object is now an array called `information`.\n- NGSI-v2 `attrs` have been split into separate arrays of `properties` and `relationships`\n- The NGSI-v2 `provider.url` has moved up to `endpoint`" + }, + "response": [] + }, + { + "name": "Check the registration", + "request": { + "method": "GET", + "header": [ + { + "key": "Accept", + "value": "application/ld+json", + "type": "text" + }, + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/csourceRegistrations/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "csourceRegistrations", + "" + ] + }, + "description": "Retrieving the registration details can be made by sending a GET request to the `/ngsi-ld/v1/csourceRegistrations/`\nendpoint, along with an appropriate JSON-LD context in the `Link` header.\n\nThe response returns the details of the registration. In this case the short names of the `properties` have been\nreturned, along with the `@context`." 
+ }, + "response": [] + }, + { + "name": "Read from Store 1", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "Once a registration has been set up, the additional registered `properties` and `relationships` are transparently\nreturned when an requested entity is requested. For simple registrations, a request to obtain the whole entity will be\nproxied to the registered `endpoint`, for partial registrations the `properties` and `relationships` are added to the\nexisting entity held within the context broker.\n\nThe response now holds an additional `tweets` Property, which returns the values obtained from\n`http://context-provider:3000/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001` - i.e. the forwarding\nendpoint.\n\nThe same response data can be seen within the supermarket application itself. In practice this data has been created via\na series of requests - the context broker is responsible for the `urn:ngsi-ld:Building:store001` data, however it checks\nto see if any further information can be provided from other sources. In our case the `CSourceRegistration` indicates\nthat one further attribute _may_ be available. The broker then requests `tweets` information from the context provider,\nand provided that it responds in a timely manner, the `tweets` information is added to the resultant payload.\n\nThe supermarket application displays the received data on screen within the supermarket application itself:\n\n![tweets-1](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/tweets-1.png)" + }, + "response": [] + }, + { + "name": "Read direct from Context Provider", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/ld+json", + "type": "text" + } + ], + "url": { + "raw": "http://{{context-provider}}/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?attrs=tweets", + "protocol": "http", + "host": [ + "{{context-provider}}" + ], + "path": [ + "static", + "tweets", + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + }, + { + "key": "attrs", + "value": "tweets" + } + ] + }, + "description": "Every context-provider must stand by a fixed contract. At a minimum must be able to respond to varieties of the\n`/ngsi-ld/v1/entities/` GET request. 
If the registration is limited to certain properties, this request will\nalso contain an `attrs` parameter in the query string.\n\nDependent upon the use case of the context-provider, it may or may not need to be able to interpret JSON-LD `@context` -\nin this case a request is merely returning the full `tweets` attribute.\n\nThe same request is made by the context broker itself when querying for registered attributes\n\nAs can be seen the `@context` has been returned in the request (since the `Content-Type` header was set). The rest of\nthe response resembles any standard NGSI-LD request." + }, + "response": [] + }, + { + "name": "Direct Update to Context Provider", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/json", + "type": "text", + "name": "Accept" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{ \n\t\"tweets\": { \n\t\t\"type\": \"Property\", \n\t\t\"value\": [\n\t\t\t\"Space is big.\",\n\t\t\t\"You just won't believe how vastly, hugely, mind-bogglingly big it is.\",\n\t\t\t\"I mean, you may think it's a long way down the road to the chemist's, but that's just peanuts to space.\"\n\t\t] \n\t} \n}" + }, + "url": { + "raw": "http://{{context-provider}}/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs", + "protocol": "http", + "host": [ + "{{context-provider}}" + ], + "path": [ + "static", + "tweets", + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001", + "attrs" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "For a read-write interface it is also possible to amend context data by making a PATCH request to the relevant\n`ngsi-ld/v1/entities//attrs` endpoint." + }, + "response": [] + }, + { + "name": "Read from Store 1", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "If the regisitered attribute is requested from the context broker, it returns the _updated_ values obtained from\n`http://context-provider:3000/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001` - i.e. the forwarding\nendpoint.\n\nThis alters the response to match the values updated in the previous PATCH request.\n\nSince the context provider is responsible for supplying `tweets` information, changes in the context provider will\nalways be reflected in requests to the context-broker itself. 
The supermarket application is calling the context broker\nfor context regardless of origin, so the updated `tweets` data are displayed on screen within the supermarket\napplication itself:\n\n![tweets-2](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/tweets-2.png)\n\nThe context broker is therefore able to return a complete holistic picture of the current state of the world." + }, + "response": [] + }, + { + "name": "Forwarded Update", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/json", + "type": "text", + "name": "Accept" + }, + { + "key": "Accept", + "value": "application/json" + } + ], + "body": { + "mode": "raw", + "raw": "{ \n\t\"type\": \"Property\", \n\t\"value\": [\n\t\t\"This must be Thursday\",\n\t\t\"I never could get the hang of Thursdays.\"\t\n\t] \n} " + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs/tweets", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001", + "attrs", + "tweets" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "A PATCH request to the context broker ( either `ngsi-ld/v1/entities//` or\n`ngsi-ld/v1/entities//attrs`) will be forwarded to the registered context provider if a registration is\nfound. It is therefore possible to alter the state of a context-provider as a side effect. Of course, not all context\nproviders are necessarily read-write, so attempting to change the attributes of forwarded context may not be fully\nrespected.\n\nIn this case however a request to PATCH `ngsi-ld/v1/entities/` will be successfully forwarded as a series of\n`ngsi-ld/v1/entities//attrs` requests for each regsitered attribute that is found in the registration." + }, + "response": [] + }, + { + "name": "Read from Store 1", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "disabled": true + } + ] + }, + "description": "The result of the previous operation can be seen by retrieving the whole entity using a GET request.\n\nThis alters the response to match the values updated in the previous PATCH request which was sent to the context broker\nand then forwarded to the context provider endpoint.\n\nAs can be seen, the updated `tweets` data is also displayed within the supermarket application itself:\n\n![tweets-3](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/tweets-3.png)" + }, + "response": [] + } + ], + "description": "Context Registrations allow some (or all) data within an entity to be provided by an external context provider. 
It could\nbe another full context-provider a separate micro-service which only responds to a subset of the NGSI-LD endpoints.\nHowever, there needs to be a contract created as to who supplies what.\n\nAll registrations can be subdivided into one of two types. Simple registrations where a single context provider is\nresponsible for the maintenance of the whole entity, and partial registrations where attributes are spread across\nmultiple context providers. For a simple registration, all context requests are forwarded\n\n| Request | Action at **Context Broker** | Action at **Context Provider** |\n| ---------- | --------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- |\n| **GET** | Pass request to **Context Provider**, proxy the response back unaltered. | Respond to context broker with the result of the GET request based on the entities held internally |\n| **PATCH** | Pass request to **Context Provider**, proxy back the HTTP back status code. | Update the entity within the **Context Provider**, Respond to the context broker with a status code |\n| **DELETE** | Pass request to **Context Provider** | Delete the entity within the **Context Provider**, Respond to the context broker with a status code |\n\nEffectively every simple registration is saying _\"this entity is held elsewhere\"_, but the entity data can be requested\nand modified via requests to this context broker.\n\nFor partial registrations the situation is more complex\n\n| Request | Action at **Context Broker** | Action at **Context Provider** |\n| ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------ |\n| **GET** | Assuming an entity exists locally, pass request for additional proxied attributes to **Context Provider**, concatenate a response back for locally held attributes and additional information from the **Context Provider** | Respond to context broker with the result of the GET request based on the entities held internally |\n| **PATCH** | Update any locally held attributes, Pass update requests for additional attributes to **Context Provider**, and return **success** or **partial success** HTTP status code dependent upon the overall result. | Update the requested attributes of the entity held within the **Context Provider**. Respond to the context broker with a status code |\n| **DELETE** | If deleting an entity, remove the complete local instance. If deleting locally held attributes remove them. If deleting attributes held in the **Context Provider**, pass request on to **Context Provider** | Delete the entity attributes within the **Context Provider**, Respond to the context broker with a status code |\n\nEach partial registration is saying _\"additional augmented context for this entity is held elsewhere\"_. The entity data\ncan be requested and modified via requests to this context broker.\n\nWith normal operation, the NGSI-LD response does not expose whether data collated from multiple sources is held directly\nwithin the context broker or whether the information has been retrieved externally. It is only when an error occurs\n(e.g. 
timeout) that the HTTP status error code reveals that externally held information could not be retrieved or\namended.", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "ac72ad1e-22ce-400a-b39e-284b1c4499cc", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "f2fca775-2703-43e2-b588-a2aace5f0506", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "protocolProfileBehavior": {} + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "161d2877-2de6-48db-9c35-98bc41d16bc1", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "a0ec1be8-bdcc-4c50-b4a4-48c36f2528ed", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "variable": [ + { + "id": "c415f017-7801-4a12-ad13-2e3291a389ec", + "key": "ScorpioBroker", + "value": "localhost:1026" + }, + { + "id": "03ce0e75-7e45-46cc-8c8c-06b30ae02cc1", + "key": "datamodels-context.jsonld", + "value": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld" + }, + { + "id": "310ed897-c65f-493e-ba72-e9b13476d5f1", + "key": "device-edge", + "value": "localhost:1027" + }, + { + "id": "ba8689d9-75c8-488a-bee2-62319b015d3d", + "key": "context-provider", + "value": "localhost:3000" + } + ], + "protocolProfileBehavior": {} +} \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/Payloads/FIWARE Linked Data.postman_collection.json b/scorpio-broker/docs/en/source/Payloads/FIWARE Linked Data.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..583b7e62a159d80f10b9b0fd2dcbf9cb860f0b1f --- /dev/null +++ b/scorpio-broker/docs/en/source/Payloads/FIWARE Linked Data.postman_collection.json @@ -0,0 +1,345 @@ +{ + "info": { + "_postman_id": "0cac323e-4ae9-451e-af75-750adb41356a", + "name": "FIWARE Linked Data", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Obtaining Version Information", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{ScorpioBroker}}/version/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "version", + "" + ] + }, + "description": "Once the Orion Context Broker is running, You can check the status by making an HTTP request to the exposed port. \nIf the response is blank, this is usually because the MongoDB database holding the context information is not running or not connected.\n\n>**Troubleshooting:** What if the response is blank ?\n>\n> * To check that a docker container is running try\n>\n>```bash\n>docker ps\n>```\n>\n>You should see two containers running. If orion is not running, you can restart the containers as necessary." 
+ }, + "response": [] + }, + { + "name": "Creating your first Data Entity", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"id\": \"urn:ngsi-ld:Building:store001\",\n \"type\": \"Building\",\n \"category\": {\n \t\"type\": \"Property\",\n \"value\": [\"commercial\"]\n },\n \"address\": {\n \"type\": \"Property\",\n \"value\": {\n \"streetAddress\": \"Bornholmer Straße 65\",\n \"addressRegion\": \"Berlin\",\n \"addressLocality\": \"Prenzlauer Berg\",\n \"postalCode\": \"10439\"\n }\n },\n \"location\": {\n \"type\": \"GeoProperty\",\n \"value\": {\n \"type\": \"Point\",\n \"coordinates\": [13.3986, 52.5547]\n }\n },\n \"name\": {\n \"type\": \"Property\",\n \"value\": \"Bösebrücke Einkauf\"\n },\n \"@context\": [\n \"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\",\n \"https://schema.lab.fiware.org/ld/fiware-datamodels-context.jsonld\"\n ]\n}" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ] + }, + "description": "## Creating Context Data\n\nAt its heart, FIWARE is a system for managing context information, so lets add some context data into the system by creating a new entity (a store in **Berlin**). Any entity must have a `id` and `type` attributes, additional attributes are optional and will depend on the system being described. Each additional attribute should also have a defined `type` and a `value` attribute." + }, + "response": [] + }, + { + "name": "Creating your Second Data Entity", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"id\": \"urn:ngsi-ld:Building:store002\",\n \"type\": \"Building\",\n \"category\": {\n \t\"type\": \"Property\",\n \"value\": [\"commercial\"]\n },\n \"address\": {\n \"type\": \"Property\",\n \"value\": {\n \"streetAddress\": \"Friedrichstraße 44\",\n \"addressRegion\": \"Berlin\",\n \"addressLocality\": \"Kreuzberg\",\n \"postalCode\": \"10969\"\n }\n },\n \"location\": {\n \"type\": \"GeoProperty\",\n \"value\": {\n \"type\": \"Point\",\n \"coordinates\": [13.3903, 52.5075]\n }\n },\n \"name\": {\n \"type\": \"Property\",\n \"value\": \"Checkpoint Markt\"\n },\n \"@context\": [\n\t\t\"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\",\n \"https://schema.lab.fiware.org/ld/fiware-datamodels-context.jsonld\"\n ]\n}" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + } + }, + "response": [] + }, + { + "name": "Retrieving Context Information", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities?type=Building", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "type", + "value": "Building" + } + ] + }, + "description": "This example returns the full data context" + }, + "response": [] + }, + { + "name": "Obtain Entity Data by id", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": 
"http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only" + }, + { + "key": "type", + "value": "Store", + "description": "Entity type, to avoid ambiguity in case there are several entities with the same entity id", + "disabled": true + }, + { + "key": "attrs", + "value": "name", + "description": "Ordered list of attribute names to display", + "disabled": true + } + ] + }, + "description": "This example returns the context data of `store1`. The `id` of the entity should be unique or the `type` parameter should also be used" + }, + "response": [] + }, + { + "name": "Obtain Entity Data by type", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities?type=Building", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only", + "disabled": true + }, + { + "key": "attrs", + "value": "name", + "description": "Ordered list of attribute names to display", + "disabled": true + }, + { + "key": "type", + "value": "Building" + } + ] + }, + "description": "This example returns the data of all `Store` entities within the context data" + }, + "response": [] + }, + { + "name": "Filter context data by text value", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?q=address.addressLocality==Kreuzberg&options=keyValues&type=Building", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "q", + "value": "address.addressLocality==Kreuzberg" + }, + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only" + }, + { + "key": "attrs", + "value": "name", + "description": "Ordered list of attribute names to display", + "disabled": true + }, + { + "key": "type", + "value": "Building" + } + ] + }, + "description": "## Filter context data by comparing the values of an attribute\n\nThis example returns the data of all `Store` entities found in the **Kreuzberg** district of Berlin." 
+ }, + "response": [] + }, + { + "name": "Filter context data by distance", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?georel=near;maxDistance:1500&geometry=point&coords=52.5162,13.3777&type=Building&options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "georel", + "value": "near;maxDistance:1500" + }, + { + "key": "geometry", + "value": "point" + }, + { + "key": "coords", + "value": "52.5162,13.3777" + }, + { + "key": "type", + "value": "Building", + "description": "Entity type, to avoid ambiguity in case there are several entities with the same entity id" + }, + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only" + }, + { + "key": "attrs", + "value": "name", + "description": "Ordered list of attribute names to display", + "disabled": true + } + ] + }, + "description": "## Filter context data by comparing the values of a geo:point attribute\n\nThis example returns the data of all `Store` entities found within 1.5km the **Brandenburg Gate** in **Berlin** (*52.5162N 13.3777W*)" + }, + "response": [] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "84a79af1-5a37-48a3-a1ce-2e6c2c345bad", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "b807b9c1-2bc8-40aa-b68b-3ae334bd7cb5", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "variable": [ + { + "id": "1b6ed661-35bf-4a6a-b73e-cd0e19f5fb81", + "key": "ScorpioBroker", + "value": "localhost:1026" + } + ], + "protocolProfileBehavior": {} +} \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/Payloads/FIWARE Relationships using Linked Data.postman_collection.json b/scorpio-broker/docs/en/source/Payloads/FIWARE Relationships using Linked Data.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..149bab6d9176e06424e8020d2d1543ae443f725f --- /dev/null +++ b/scorpio-broker/docs/en/source/Payloads/FIWARE Relationships using Linked Data.postman_collection.json @@ -0,0 +1,662 @@ +{ + "info": { + "_postman_id": "a0af916a-3afc-4e7c-885d-ded2b588f031", + "name": "FIWARE Relationships using Linked Data", + "description": "This tutorial discusses relationships between linked data entities and how the concepts of **JSON-LD** and **NGSI-LD**\ncan be used to interrogate entities and navigate from one entity to another. The tutorial discusses a series of simple\nlinked-data data models based around the supermarket chain’s store finder application, and demonstrates how to design\nmodels holding one-to-one, one-to-many and many-to-many relationships. This **NGSI-LD** tutorial is a direct analogue to\nthe earlier _Understanding Entities and Relationships_ tutorial (which was based on the **NGSI v2** interface). 
The\ndifferences in relationships created using **NSGI v2** and **NGSI-LD** are highlighted and discussed in detail.\n\nThe `docker-compose` file for this tutorial can be found on GitHub: \n\n![GitHub](https://fiware.github.io/tutorials.Relationships-Linked-Data/icon/GitHub-Mark-32px.png) [FIWARE 602: Relationships using Linked Data](https://github.com/Fiware/tutorials.Relationships-Linked-Data)\n\n\nAll NGSI data entity attributes can be divided into one of two types.\n\n- _Property_ attributes\n- _Relationship_ attributes\n\nFor each entity, the _Property_ attributes (including various subtypes such as _GeoProperty_ , _TemporalProperty_ and\ntime values) define the current state something in the real world. As the state of the entity changes the `value` of\neach _Property_ is updated to align with the last real world reading of the the attribute. All _Property_ attributes\nrelate to the state of a single entity.\n\n_Relationship_ attributes correspond to the interactions **between** entities (which are expected to change over time).\nThey effectively provide the graph linking the nodes of the data entities together. Each _Relationship_ attribute holds\nan `object` in the form of a URN - effectively a pointer to another object. _Relationship_ attributes do not hold data\nthemselves.\n\nBoth properties and relationships may in turn have a linked embedded structure (of _properties-of-properties_ or\n_properties-of-relationships or relationships-of-properties_ or _relationships-of-relationships_ etc.) which lead a full\ncomplex knowledge graph.\n\n## Designing Data Models using JSON-LD\n\nIn order for computers to be able to navigate linked data structures, proper ontologically correct data models must be\ncreated and a full `@context` must be defined and made accessible. We can do this by reviewing and updating the existing\ndata models from the NGSI v2 [Entity Relationships](https://github.com/FIWARE/tutorials.Entity-Relationships) tutorial.\n\n### Revision: Data Models for a Stock management system as defined using NGSI-v2\n\nAs a reminder, four types of entity were created in the NGSI v2 stock management system. The relationship between the\nfour NGSI v2 entity models was defined as shown below:\n\n![](https://jason-fox.github.io/tutorials.Relationships-Linked-Data/img/entities-v2.png)\n\nMore details can be found in the NGSI v2\n[Entity Relationships](https://github.com/FIWARE/tutorials.Entity-Relationships) tutorial.\n\nIn NGSI v2 relationship attributes are just standard properties attributes. By convention NGSI v2 relationship\nattributes are given names starting `ref` and are defined using the `type=\"Relationship\"`. However, this is merely\nconvention and may not be followed in all cases. 
There is no infallible mechanism for detecting which attributes are\nassociative relationships between entities.\n\n### Data Models for a Stock management system defined using NGSI-LD\n\nThe richer [JSON-LD](https://json-ld.org/spec/FCGS/json-ld/20130328) description language is able to define NSGI-LD\nentities by linking entities directly as shown below.\n\n![](https://jason-fox.github.io/tutorials.Relationships-Linked-Data/img/entities-ld.png)\n\nThe complete data model must be understandable by both developers and machines.\n\n- A full Human readable definition of this data model can be found\n [online](https://fiware.github.io/tutorials.Step-by-Step/schema).\n- The machine readable JSON-LD defintion can be found at\n [`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld) -\n this file will be used to provide the `@context` to power our NGSI-LD data entities.\n\nFour data models have been created for this NGSI-LD stock management system. The relationships between the models are\ndescribed below:\n\n- The [**Store** model](https://fiware.github.io/tutorials.Step-by-Step/schema/Store/) is now based on and extends the\n FIWARE\n [**Building** model](https://fiware-datamodels.readthedocs.io/en/latest/Building/Building/doc/spec/index.html). This\n ensures that it offers standard properties for `name`, `address` and category.\n - A Building will hold `furniture` this is a 1-many relationship.\n - Building :arrow_right: Shelf.\n- The [**Shelf** model](https://fiware.github.io/tutorials.Step-by-Step/schema/Shelf/) is a custom data model defined\n for the tutorial\n - Each **Shelf** is `locatedIn` a **Building**. This is a 1-1 relationship. It is the reciprical relationship to\n `furniture` defined above.\n - Shelf :arrow_right: Building.\n - A **Shelf** is `installedBy` a **Person** - this is a 1-1 relationship. A shelf knows who installed it, but it\n is this knowledge is not part of the Person entity itself.\n - Shelf :arrow_right: Person\n - A **Shelf** `stocks` a given **Product**. This is another 1-1 relationship, and again it is not recipricated. A\n **Product** does not know which **Shelf** it is to be found on.\n - Shelf :arrow_right: Product\n- A [**StockOrder** model](https://fiware.github.io/tutorials.Step-by-Step/schema/StockOrder/) replaces the\n **Inventory Item** bridge table defined for NGSI v2 :\n - A **StockOrder** is `requestedBy` a **Person** - this is a 1-1 relationship.\n - StockOrder :arrow_right: Person.\n - A **StockOrder** is `requestedFor` a **Building** - this is a 1-1 relationship.\n - StockOrder :arrow_right: Building.\n - A **StockOrder** is a request for a specific `orderedProduct` - this 1-1 relationship.\n - StockOrder :arrow_right: Product.\n- The [**Product** model](https://fiware.github.io/tutorials.Step-by-Step/schema/Product/) remains unchanged. It has\n no relationships of its own.\n\nAdditionally some relationships have been defined to be linked to `https://schema.org/Person` entities. This could be\noutlinks to a separate HR system for example.\n\n## Comparison between Linked and Non-Linked Data Systems\n\nObviously within a single isolated Smart System itself, it makes no difference whether a rich, complex linked-data\narchitecture is used or a simpler, non-linked-data system is created. However if the data is designed to be shared, then\nlinked data is a requirement to avoid data silos. 
An external system is unable to \"know\" what relationships are unless\nthey have been provided in a machine readable form.\n\n### Video: Rich Snippets: Product Search\n\nA simple example of an external system interogating for structured data can be found in online product search. Machines\nfrom third parties such as Google are able to read product information (encoded using a standard\n[**Product** data model](https://jsonld.com/product/)) and display a rich snippet of product information with a standard\nstar rating.\n\n[![](http://img.youtube.com/vi/_-rRxKSm2ic/0.jpg)](https://www.youtube.com/watch?v=_-rRxKSm2ic \"Rich Snippets\")\n\nClick on the image above to watch an introductory video on rich snippets for product search.\n\nFurther machine readable data model examples can be found on the [Steal Our JSON-LD](https://jsonld.com/) website.\n\n## Traversing relationships\n\n> **Example**: Imagine the scenario where a pallet of Products are moved from stock in the warehouse (`stockCount`) onto\n> the shelves of the store (`storeCount`) . How would NGSI v2 and NGSI-LD computations differ?\n\n### Relationships without Linked Data\n\nWithout linked data, there is no machine readable way to connect entities together. Every data relationship must be\nknown in advanced somehow. Within an isolated Smart System this is not an issue, since the architect of the system will\nknow in advance _what-connects-to-what_.\n\nFor example in the simple NGSI v2 Entity Relationships tutorial, a convenience bridge table **InventoryItem** entity had\nbeen created specifically to hold both count on the shelf and count in the warehouse in a single entity. In any\ncomputation only the **InventoryItem** entity would be involved. The `stockCount` value would be decremented and the\n`shelfCount` value would incremented. In the NGSI v2 model both the `storeCount` and the `shelfCount` have been placed\ninto the conceptual **InventoryItem** Entity. This is a necessary workaround for NGSI v2 and it allows for simpler data\nreading and data manipulation. However technically it is ontologically incorrect, as there is no such thing as an\n**InventoryItem** in the real world, it is really two separate ledgers, products bought for the store and products sold\non the shelf, which in turn have an indirect relationship.\n\nSince the entity data is not yet machine readable externally, the programmer is free to design models as she sees fit\nand can decide to update two attributes of one **InventoryItem** Entity or two separate attributes on two separate\n**Shelf** and **StockOrder** entities without regards as to whether these really are real concrete items in the real\nworld. However this means **external systems** cannot discover information for themselves and must be pre-programmed to\nknow where information is held.\n\n### Relationships with Linked Data\n\nWith a well defined data model using linked data, every relationship can be predefined in advance and is discoverable.\nUsing [JSON-LD](https://json-ld.org/spec/FCGS/json-ld/20130328) concepts (specifically `@graph` and `@context`) it is\nmuch easier for computers to understand indirect relationships and navigate between linked entities. Due to hese\nadditional annotations it is possible to create usable models which are ontologically correct and therefore **Shelf**\ncan now be directly assigned a `numberOfItems` attribute and bridge table concept is no longer required. 
This is\nnecessary as other systems may be interogating **Shelf** directly.\n\nSimilarly a real **StockOrder** Entity can be created which holds a entry of which items are currently on order for each\nstore. This is a proper context data entity as `stockCount` describes the current state of a product in the warehouse.\nOnce again this describes a single, real world entity and is ontologically correct.\n\nUnlike the NGSI v2 scenario, with linked data, it would be possible for an **external system** to discover relationships\nand interogate our Supermarket. Imagine for example, an\n[Autonomous Mobile Robot](https://www.intorobotics.com/40-excellent-autonomous-mobile-robots-on-wheels-that-you-can-build-at-home/)\nsystem which is used to move a pallet of products onto a shelf it would be possible for this **external system** to\n\"know\" about our supermarket by navigating the relationships in the linked data the `@graph` from **StockOrder** to\n**Shelf** as shown:\n\n- Some `product:XXX` items have been removed from `stockOrder:0001` - decrement `stockCount`.\n- Interogating the **StockOrder** is discovered that the **Product** is `requestedFor` for a specific URI e.g.\n `store:002`\n\n```json\n \"@graph\": [\n {\n \"@id\": \"tutorial:orderedProduct\",\n \"@type\": \"https://uri.etsi.org/ngsi-ld/Relationship\",\n \"schema:domainIncludes\": [{\"@id\": \"tutorial:StockOrder\"}],\n \"schema:rangeIncludes\": [{\"@id\": \"tutorial:Product\"}],\n \"rdfs:comment\": \"The Product ordered for a store\",\n \"rdfs:label\": \"orderedProduct\"\n },\n ...etc\n]\n```\n\n- It is also discovered from the **StockOrder** model that the `requestedFor` URI defines a **Building**\n\n```json\n \"@graph\": [\n {\n \"@id\": \"tutorial:requestedFor\",\n \"@type\": \"https://uri.etsi.org/ngsi-ld/Relationship\",\n \"schema:domainIncludes\": [{\"@id\": \"tutorial:StockOrder\"}],\n \"schema:rangeIncludes\": [{\"@id\": \"fiware:Building\"}],\n \"rdfs:comment\": \"Store for which an item is requested\",\n \"rdfs:label\": \"requestedFor\"\n },\n ...etc\n]\n```\n\n- It is discovered from the **Building** model that every **Building** contains `furniture` as an array of URIs.\n- It is discovered from the **Building** model that these URIs represent **Shelf** units\n\n```json\n\"@graph\": [\n {\n \"@id\": \"tutorial:furniture\",\n \"@type\": \"https://uri.etsi.org/ngsi-ld/Relationship\",\n \"schema:domainIncludes\": [{\"@id\": \"fiware:Building\"}],\n \"schema:rangeIncludes\": [{\"@id\": \"tutorial:Shelf\"}],\n \"rdfs:comment\": \"Units found within a Building\",\n \"rdfs:label\": \"furniture\"\n },\n ...etc\n]\n```\n\n- It is discovered from the **Shelf** model that the `stocks` attribute holds a URI representing **Product** items.\n\n```json\n\"@graph\": [\n {\n \"@id\": \"tutorial:stocks\",\n \"@type\": \"https://uri.etsi.org/ngsi-ld/Relationship\",\n \"schema:domainIncludes\": [{\"@id\": \"tutorial:Shelf\"}],\n \"schema:rangeIncludes\": [{\"@id\": \"tutorial:Product\"}],\n \"rdfs:comment\": \"The product found on a shelf\",\n \"rdfs:label\": \"stocks\"\n },\n ...etc\n]\n```\n\n- A request the **Shelf** unit which holds the correct **Product** for the `stocks` attribute is made and the Shelf\n `numberOfItems` attribute can be incremented.\n\nThrough creating and using standard data models and decribing the linked data properly, it would not matter to the robot\nif the underlying system were to change, provided that the Properties and Relationships resolve to fully qualified names\n(FQN) and a complete `@graph`. 
For example, the JSON short name attributes could be amended or the relationships\nredesigned but their real intent (which resolves to a fixed FQN) could still be discovered and used.\n\n# Prerequisites\n\n## Docker\n\nTo keep things simple all components will be run using [Docker](https://www.docker.com). **Docker** is a container\ntechnology which allows different components to be isolated into their respective environments.\n\n- To install Docker on Windows follow the instructions [here](https://docs.docker.com/docker-for-windows/)\n- To install Docker on Mac follow the instructions [here](https://docs.docker.com/docker-for-mac/)\n- To install Docker on Linux follow the instructions [here](https://docs.docker.com/install/)\n\n**Docker Compose** is a tool for defining and running multi-container Docker applications. A\n[YAML file](https://raw.githubusercontent.com/Fiware/tutorials.Identity-Management/master/docker-compose.yml) is used to\nconfigure the required services for the application. This means all container services can be brought up in a single\ncommand. Docker Compose is installed by default as part of Docker for Windows and Docker for Mac, however Linux users\nwill need to follow the instructions found [here](https://docs.docker.com/compose/install/)\n\n## Cygwin\n\nWe will start up our services using a simple bash script. Windows users should download [cygwin](http://www.cygwin.com/)\nto provide command-line functionality similar to a Linux distribution on Windows.\n\n\n# Creating and Associating Data Entities\n\nOn start up, the system is brought up with a series of **Building**, **Product** and **Shelf** entities already present.\nYou can query for them using the requests below. In each case only the _Properties_ of the entities have been created.\n\nTo avoid ambiguity, computers prefer to use unique IDs when referring to well defined concepts. For each of the NGSI-LD\nentities returned, the names of the attributes received can be defined either as a fully qualified name (FQN) or as\nsimple JSON attributes dependent upon whether the associated `Link` header connecting the NGSI-LD Data Entity to the\ncomputer readable JSON-LD `@context` Data Models is included in the request.", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Obtain full Linked Data model context", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "{{datamodels-context.jsonld}}", + "host": [ + "{{datamodels-context.jsonld}}" + ] + }, + "description": "Information about the data models and relationships used within this tutorial\ncan be obtained by requesting the full `@context` and `@graph`." 
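As an illustrative sketch (not part of the Postman collection), the `@graph` walk described above can also be scripted. The snippet below assumes Node.js 18+ (so a global `fetch` is available) and simply lists every _Relationship_ definition published in the tutorial context, using the same URL that the `datamodels-context.jsonld` collection variable points to:

```javascript
// Illustrative sketch: list the Relationship definitions in the tutorial @graph.
// Assumes Node.js 18+ so that fetch() is available without extra dependencies.
const CONTEXT_URL =
    "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld";

(async () => {
    const jsonld = await fetch(CONTEXT_URL).then((res) => res.json());
    for (const node of jsonld["@graph"] || []) {
        if (node["@type"] === "https://uri.etsi.org/ngsi-ld/Relationship") {
            // Each entry follows the pattern shown above:
            // schema:domainIncludes -> schema:rangeIncludes, plus a readable comment.
            console.log(node["rdfs:label"], "-", node["rdfs:comment"]);
        }
    }
})();
```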
+ }, + "response": [] + }, + { + "name": "Display all Buildings", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "type": "text", + "value": "; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "disabled": true + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities?type=https://uri.fiware.org/ns/datamodels%23Building&options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "type", + "value": "https://uri.fiware.org/ns/datamodels%23Building" + }, + { + "key": "options", + "value": "keyValues" + } + ] + }, + "description": "The Stores of the supermarket have been created using the FIWARE\n[**Building** model](https://fiware-datamodels.readthedocs.io/en/latest/Building/Building/doc/spec/index.html) and the\nenumerated value of this type is `fiware:Building` which expands to `https://uri.fiware.org/ns/datamodels%23Building`.\nIt is therefore possible to request all building entities without supplying a known context.\n\nThe response returns all of the existing **Building** entities, with the attributes expanded as fully qualified names\n(FQNs).\n\nAccording to the [defined data model](https://fiware.github.io/tutorials.Step-by-Step/schema/Store/):\n\n- The `type` attribute is an `https://uri.etsi.org/ngsi-ld/type`\n- The `name` attribute is an `https://uri.etsi.org/ngsi-ld/name`\n- The `location` attribute is an `https://uri.etsi.org/ngsi-ld/location`\n- The `address` attribute is an `http://schema.org/address`\n- The `category` attribute is an `https://uri.fiware.org/ns/datamodels#category`\n\n`type`, `name` and `location` are defined in the NGSI-LD Core Context:\n[`https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld`](https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld).\nThe other attributes are defined using the Tutorial's own Context:\n[`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld).\nBoth `category` and `address` are _common_ attributes the defintions of which are brought in from the FIWARE data models\nand `schema.org` respectively." 
+ }, + "response": [] + }, + { + "name": "Display all Products", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "type": "text", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities?type=https://fiware.github.io/tutorials.Step-by-Step/schema/Product&options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "type", + "value": "https://fiware.github.io/tutorials.Step-by-Step/schema/Product" + }, + { + "key": "options", + "value": "keyValues" + } + ] + }, + "description": "Requesting the **Product** entities can be done by supplying the FQN of the entity `type` in the request as well.\n\nHowever since the full context has been supplied in the `Link` header, the short names are returned.\n\nAccording to the [defined data model](https://fiware.github.io/tutorials.Step-by-Step/schema/Product/):\n\n- The `type` attribute is an `https://uri.etsi.org/ngsi-ld/type`\n- The `name` attribute is an `https://uri.etsi.org/ngsi-ld/name`\n- The `price` attribute is an `https://fiware.github.io/tutorials.Step-by-Step/schema/price`\n- The `size` attribute is an `https://fiware.github.io/tutorials.Step-by-Step/schema/size`\n- The `currency` attribute is an `https://fiware.github.io/tutorials.Step-by-Step/schema/currency`\n\nThe programmatically the Product model is fully described in the\n[`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld)" + }, + "response": [] + }, + { + "name": "Display all Shelves", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "type": "text", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities?type=Shelf&options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities" + ], + "query": [ + { + "key": "type", + "value": "Shelf" + }, + { + "key": "options", + "value": "keyValues" + } + ] + }, + "description": "Requesting the **Product** entities can be done by supplying the short of the entity `type` in the request as well,\nprovided the full context has been supplied in the `Link` header.\n\nOnce again the short names are returned.\n\nAccording to the [defined data model](https://fiware.github.io/tutorials.Step-by-Step/schema/Shelf/):\n\n- The `type` attribute is an `https://uri.etsi.org/ngsi-ld/type`\n- The `name` attribute is an `https://uri.etsi.org/ngsi-ld/name`\n- The `location` attribute is an `https://uri.etsi.org/ngsi-ld/location`\n- The `maxCapacity` attribute is an `https://fiware.github.io/tutorials.Step-by-Step/schema/maxCapacity`\n- The `numberOfItems` attribute is an `https://fiware.github.io/tutorials.Step-by-Step/schema/numberOfItems`\n\nThe programmatically the Shelf model is fully described in the\n[`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld)" + }, + "response": [] + }, + { + "name": "Obtain Shelf Information", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; 
type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/?options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Shelf:unit001", + "" + ], + "query": [ + { + "key": "type", + "value": "Shelf", + "description": "Entity type", + "disabled": true + }, + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only" + } + ] + }, + "description": "This example returns the context data of the *Shelf* entity with the `id=urn:ngsi-ld:Shelf:unit001`.\n\nThere are currently three additional property attributes present `location`, `maxCapacity` and `name`" + }, + "response": [] + }, + { + "name": "Adding 1-1 relationships", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + }, + { + "key": "fiware-servicepath", + "value": "/", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"stocks\": {\n \t\"type\": \"Relationship\",\n \"object\": \"urn:ngsi-ld:Product:001\"\n },\n \"numberOfItems\": {\"type\": \"Property\",\"value\": 50},\n \"locatedIn\" : {\n \t\"type\": \"Relationship\", \"object\": \"urn:ngsi-ld:Building:store001\",\n \t\"requestedBy\": {\n\t\t\t\"type\": \"Relationship\",\n\t\t\t\"object\": \"urn:ngsi-ld:Person:bob-the-manager\"\n\t\t},\n \t\"installedBy\": {\n\t\t\t\"type\": \"Relationship\",\n\t\t\t\"object\": \"urn:ngsi-ld:Person:employee001\"\n\t\t},\n\t\t\"statusOfWork\": {\n\t\t\t\"type\": \"Property\",\n\t\t\t\"value\": \"completed\"\n\t\t}\n },\n \"@context\": [\n\t\t\"https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld\",\n \"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\"\n ]\n}" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/attrs", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Shelf:unit001", + "attrs" + ] + }, + "description": "Within the `@context` a **Shelf** has been defined with two relationships. (`stocks` and `locatedIn`)\n\nTo create a relationship add a new attribute with `type=Relationship` and an associated `object` attribute. \\\nValue of `object` is the URN corresponding to the linked data entity.\n\n**Note** that the relationship is currently unidirectional." 
+ }, + "response": [] + }, + { + "name": "Obtain the Updated Shelf", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text", + "disabled": true + }, + { + "key": "", + "value": "", + "type": "text", + "disabled": true + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Shelf:unit001" + ], + "query": [ + { + "key": "type", + "value": "Shelf", + "description": "Entity type", + "disabled": true + } + ] + }, + "description": "This example returns the context data of the *Shelf* entity with the `id=urn:ngsi-ld:Shelf:unit001`.\n\nThere are now two additional property attributes present `stocks` and `locatedIn`." + }, + "response": [] + }, + { + "name": "Finding the location of a Shelf", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/?options=keyValues&attrs=none", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Shelf:unit001", + "" + ], + "query": [ + { + "key": "type", + "value": "Shelf", + "description": "Entity type", + "disabled": true + }, + { + "key": "options", + "value": "keyValues" + }, + { + "key": "attrs", + "value": "none" + } + ] + }, + "description": "This example returns the `locatedIn` value associated with a given `Shelf` unit. \n\nIf the `id` and `type` of a data entity are known, a specific field can be requested by using the `attrs` parameter." + }, + "response": [] + }, + { + "name": "Find the ids of all Shelf Units in a Store", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "type": "text", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + }, + { + "key": "Accept", + "name": "Accept", + "type": "text", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?type=Shelf&options=keyValues&q=locatedIn==\"urn:ngsi-ld:Building:store001\"&attrs=locatedIn", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "type", + "value": "Shelf" + }, + { + "key": "options", + "value": "keyValues" + }, + { + "key": "q", + "value": "locatedIn==\"urn:ngsi-ld:Building:store001\"" + }, + { + "key": "attrs", + "value": "locatedIn" + } + ] + }, + "description": "This example returns the `locatedIn` URNs of all **Shelf** entities found within `urn:ngsi-ld:Building:store001`. 
This is purely an instance of using the `q` parameter to filter on attribute value" + }, + "response": [] + }, + { + "name": "Adding a 1-many relationship", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"furniture\": {\n \t\"type\": \"Relationship\",\n \"object\": [ \"urn:ngsi-ld:Shelf:001\", \"urn:ngsi-ld:Shelf:002\"]\n },\n \"@context\": [\n\t\t\"https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld\"\n ]\n}" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001", + "attrs" + ] + }, + "description": "To add a 1-many relationship, add an array as \nthe value of `object` attribute. This can be used for simple links without additional data.\nThis method is used to add **Shelf** entities as `furniture` in the **Store**.\n\nThis is the reciprocal relationship to the `locatedIn` attribute on **Shelf**" + }, + "response": [] + }, + { + "name": "Finding all Shelf Units found within a Store", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Accept", + "value": "application/json", + "type": "text", + "name": "Accept" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?options=keyValues&attrs=furniture", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001" + ], + "query": [ + { + "key": "options", + "value": "keyValues" + }, + { + "key": "attrs", + "value": "furniture" + } + ] + }, + "description": "To find all the `furniture` within a **Building**, simply make a request to \nretrieve the `furniture` attribute. \n\nBecause the repicrocal relationship already exists,\nAdditional information can be obtained from the **Shelf** entities themselves." + }, + "response": [] + }, + { + "name": "Obtain Updated Building", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001", + "" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only", + "disabled": true + } + ] + }, + "description": "This example returns the context data of the `Building` entity with the `id=urn:ngsi-ld:Building:store001`.\n\nThe response now includes the additional relationship property `furniture`, which has been added in the previous step." 
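With the reciprocal `furniture` relationship in place, a client can walk from a **Building** to its **Shelf** units in two requests. A minimal sketch under the same assumptions as the snippets above (Node.js 18+, collection defaults, tutorial context supplied via the `Link` header):

```javascript
// Illustrative sketch: Building -> furniture -> Shelf traversal.
const broker = "http://localhost:1026"; // assumption - match the ScorpioBroker variable
const link =
    '<https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld>; ' +
    'rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"';
const headers = { Link: link, Accept: "application/json" };

(async () => {
    const building = await fetch(
        `${broker}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?options=keyValues&attrs=furniture`,
        { headers }
    ).then((res) => res.json());

    // `furniture` holds an array of Shelf URNs - fetch them all in one call.
    const shelves = await fetch(
        `${broker}/ngsi-ld/v1/entities?type=Shelf&options=keyValues&id=${building.furniture.join(",")}`,
        { headers }
    ).then((res) => res.json());

    console.log(shelves);
})();
```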
+ }, + "response": [] + }, + { + "name": "Creating complex relationships", + "request": { + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/ld+json" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"id\": \"urn:ngsi-ld:StockOrder:001\",\n \"type\": \"StockOrder\",\n \"requestedFor\": {\n \"type\": \"Relationship\",\n \"object\": \"urn:ngsi-ld:Building:store001\"\n },\n \"requestedBy\": {\n\t\"type\": \"Relationship\",\n\t\"object\": \"urn:ngsi-ld:Person:bob-the-manager\"\n },\n \"orderedProduct\": {\n \"type\": \"Relationship\",\n \"object\": \"urn:ngsi-ld:Product:001\"\n },\n \"stockCount\": {\n \"type\": \"Property\",\n \"value\": 10000\n },\n \"orderDate\": {\n \"type\": \"Property\",\n \"value\": {\n \"@type\": \"DateTime\",\n \"@value\": \"2018-08-07T12:00:00Z\"\n }\n },\n \"@context\": [\n \"https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld\"\n ]\n}" + }, + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ] + }, + "description": "To create a more complex relationship, and additional data entity must be created which holds the current state of the links between real world items.\nIn the case of the NGSI-LD data model we have already created, a **StockOrder** can be used to link **Product**, **Building** and **Person** entities and the state of the relationships between them. As well as _Relationship_ attributes,\na **StockOrder** can hold _Property_ attributes (such as the `stockCount`) and other more complex metadata such as _Properties-of-Properties_ or _Properties-of-Relationships_\n\nThe **StockOrder** is created as a standard NGSI-LD data entity." + }, + "response": [] + }, + { + "name": "Find all stores in which a product is sold", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Accept", + "value": "application/json", + "type": "text", + "name": "Accept" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?type=StockOrder&q=orderedProduct==\"urn:ngsi-ld:Product:001\"&attrs=requestedFor&options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "type", + "value": "StockOrder" + }, + { + "key": "q", + "value": "orderedProduct==\"urn:ngsi-ld:Product:001\"" + }, + { + "key": "attrs", + "value": "requestedFor" + }, + { + "key": "options", + "value": "keyValues" + } + ] + }, + "description": "Since _Relationship_ attributes are just like any other attribute, standard `q` parameter queries can be made on the **StockOrder** to obtain which entity relates to it. For example the query below returns an array of stores in which a given product is sold.\n\nThe query `q==orderedProduct=\"urn:ngsi-ld:Product:001\"` is used to filter the entities.\n\nThe response returns an array of `requestedFor` attributes in the response." 
+ }, + "response": [] + }, + { + "name": "Find all Products sold in a Store", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "type": "text", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"" + }, + { + "key": "Accept", + "name": "Accept", + "type": "text", + "value": "application/json" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?type=StockOrder&q=requestedFor==\"urn:ngsi-ld:Building:store001\"&options=keyValues&attrs=orderedProduct", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "type", + "value": "StockOrder" + }, + { + "key": "q", + "value": "requestedFor==\"urn:ngsi-ld:Building:store001\"" + }, + { + "key": "options", + "value": "keyValues" + }, + { + "key": "attrs", + "value": "orderedProduct" + } + ] + }, + "description": "The query below returns an array of stores in which a given product is sold.\n\nThe query `q==requestedFor=\"urn:ngsi-ld:Building:store001\"` is used to filter the entities.\n\nThe request returns an array of `orderedProduct` attributes in the response.\nThis is the reciprocal of the previous request." + }, + "response": [] + }, + { + "name": "Obtain Stock Order", + "request": { + "method": "GET", + "header": [], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:StockOrder:001?options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:StockOrder:001" + ], + "query": [ + { + "key": "options", + "value": "keyValues", + "description": "* `keyValues` option in order to get a more compact and brief representation, including just attribute values\n* `values` option combined with a list of attribute values `attrs` for an ordered list of attributes only" + } + ] + }, + "description": "A complete stock order can be obtained by making a standard GET request to the `/ngsi-ld/v1/entities/` endpoint and adding the appropriate URN.\n\n\nThe response returns the fully expanded entity." 
+ }, + "response": [] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "1585a108-54ce-409e-b07e-17714fe5dd63", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "27a93cc4-ef0e-4f48-a669-14e2f0e12c2c", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "variable": [ + { + "id": "553f4e11-ffbc-4aea-a3d6-3c252eabbbf0", + "key": "ScorpioBroker", + "value": "localhost:1026" + }, + { + "id": "e9f2e052-8e86-46a7-ba2d-ca0bf966c4c1", + "key": "datamodels-context.jsonld", + "value": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld" + } + ], + "protocolProfileBehavior": {} +} \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/Payloads/FIWARE Working with Linked Data.postman_collection.json b/scorpio-broker/docs/en/source/Payloads/FIWARE Working with Linked Data.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..e9fc9386c11af635d7e6705472cab8ad6934d541 --- /dev/null +++ b/scorpio-broker/docs/en/source/Payloads/FIWARE Working with Linked Data.postman_collection.json @@ -0,0 +1,400 @@ +{ + "info": { + "_postman_id": "ff9dd5e6-1458-4cb1-869b-29c940cf02f3", + "name": "FIWARE Working with Linked Data", + "description": "[![FIWARE Core Context Management](https://nexus.lab.fiware.org/repository/raw/public/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/core/README.md)\n[![NGSI LD](https://img.shields.io/badge/NGSI-linked_data-red.svg)](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf)\n\nThis tutorial teaches FIWARE users how to architect and design a system based on **linked data** and to alter linked\ndata context programmatically. The tutorial extends the knowledge gained from the equivalent\n[NGSI-v2 tutorial](https://github.com/FIWARE/tutorials.Accessing-Context/) and enables a user understand how to write\ncode in an [NGSI-LD](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf) capable\n[Node.js](https://nodejs.org/) [Express](https://expressjs.com/) application in order to retrieve and alter context\ndata. This removes the need to use the command-line to invoke cUrl commands.\n\nThe tutorial is mainly concerned with discussing code written in Node.js, however some of the results can be checked by\nmaking [cUrl](https://ec.haxx.se/) commands.\n\nThe `docker-compose` files for this tutorial can be found on GitHub: \n\n![GitHub](https://fiware.github.io/tutorials.Working-with-Linked-Data/icon/GitHub-Mark-32px.png) [FIWARE 603: Traversing Linked Data Programmatically](https://github.com/Fiware/tutorials.Working-with-Linked-Data)\n\n\n# Working with Linked Data Entities\n\n> - “This is the house that Jack built.\n> - This is the malt that lay in the house that Jack built.\n> - This is the rat that ate the malt
That lay in the house that Jack built.\n> - This is the cat
That killed the rat that ate the malt
That lay in the house that Jack built.\n> - This is the dog that chased the cat
That killed the rat that ate the malt
That lay in the house that\n> Jack built.”\n>\n> ― This Is the House That Jack Built, Traditional English Nursery Rhyme\n\nNGSI-LD is an evolution of NGSI-v2, so it should not be surprising that Smart solutions based on NGSI-LD will need to\ncover the same basic scenarios as outlined in the previous NGSI-v2\n[tutorial](https://github.com/FIWARE/tutorials.Accessing-Context/) on programmatic data access.\n\nNGSI-LD Linked data formalizes the structure of context entities to a greater degree, by restricting data\nattributes to be defined as either _Property_ attributes or _Relationship_ attributes only. This means that it is\npossible to traverse the context data graph with greater certainty when moving from one _Relationship_ to another. All\nthe context data entities within the system are defined by JSON-LD data models, which are formally defined by\nreferencing a context file, and this programmatic definition should guarantee that the associated linked entity exists.\n\nThree basic data access scenarios for the supermarket are defined below:\n\n- Reading Data - e.g. Give me all the data for the **Building** entity `urn:ngsi-ld:Building:store001`\n- Aggregation - e.g. Combine the **Products** entities sold in **Building** `urn:ngsi-ld:Building:store001` and\n display the goods for sale\n- Altering context within the system - e.g. Make a sale of a product:\n - Update the daily sales records by the price of the **Product**\n - decrement the `numberOfItems` of the **Shelf** entity\n - Create a new Transaction Log record showing the sale has occurred\n - Raise an alert in the warehouse if less than 10 objects remain on sale\n - etc.\n\nFurther advanced scenarios will be covered in later tutorials.\n\n## Linked Data Entities within a stock management system\n\nThe supermarket data created in the [previous tutorial](https://github.com/FIWARE/tutorials.Relationships-Linked-Data/)\nwill be loaded into the context broker. The existing relationships between the entities are defined as shown below:\n\n![](https://fiware.github.io/tutorials.Relationships-Linked-Data/img/entities-ld.png)\n\nThe **Building**, **Product**, **Shelf** and **StockOrder** entities will be used to display data on the frontend of our\ndemo application.\n\n## The teaching goal of this tutorial\n\nThe aim of this tutorial is to improve developer understanding of programmatic access to context data through defining\nand discussing a series of generic code examples covering common data access scenarios. For this purpose a simple\nNode.js Express application will be created.\n\nThe intention here is not to teach users how to write an application in Express - indeed any language could have been\nchosen. It is merely to show how **any** sample programming language could be used to alter the context to achieve the\nbusiness logic goals.\n\nObviously, your choice of programming language will depend upon your own business needs - when reading the code below\nplease keep this in mind and substitute Node.js with your own programming language as appropriate.\n\n# Stock Management Frontend\n\nAll the Node.js Express code for the demo can be found within the `ngsi-ld` folder of the GitHub repository:\n[Stock Management example](https://github.com/FIWARE/tutorials.Step-by-Step/tree/master/context-provider). 
The\napplication runs on the following URLs:\n\n- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001`\n- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store002`\n- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store003`\n- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store004`\n\n> :information_source: **Tip** Additionally, you can also watch the status of recent requests yourself by following the\n> container logs or viewing information on `localhost:3000/app/monitor` in a web browser.\n>\n> ![FIWARE Monitor](https://fiware.github.io/tutorials.Accessing-Context/img/monitor.png)\n\n# Prerequisites\n\n## Docker\n\nTo keep things simple all components will be run using [Docker](https://www.docker.com). **Docker** is a container\ntechnology which allows different components to be isolated into their respective environments.\n\n- To install Docker on Windows follow the instructions [here](https://docs.docker.com/docker-for-windows/)\n- To install Docker on Mac follow the instructions [here](https://docs.docker.com/docker-for-mac/)\n- To install Docker on Linux follow the instructions [here](https://docs.docker.com/install/)\n\n**Docker Compose** is a tool for defining and running multi-container Docker applications. A\n[YAML file](https://raw.githubusercontent.com/fiware/tutorials.Relationships-Linked-Data/master/docker-compose.yml) is\nused to configure the required services for the application. This means all container services can be brought up in a\nsingle command. Docker Compose is installed by default as part of Docker for Windows and Docker for Mac, however Linux\nusers will need to follow the instructions found [here](https://docs.docker.com/compose/install/)\n\n## Cygwin\n\nWe will start up our services using a simple bash script. Windows users should download [cygwin](http://www.cygwin.com/)\nto provide command-line functionality similar to a Linux distribution on Windows.\n\n", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Reading Linked Data", + "item": [ + { + "name": "Retrieve a known Store", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/ld+json", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/?type=Building&options=keyValues", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001", + "" + ], + "query": [ + { + "key": "type", + "value": "Building" + }, + { + "key": "options", + "value": "keyValues" + } + ] + }, + "description": "This example reads the context data of a given **Store** entity to display the results on screen. 
Reading entity data\ncan be done using the `ngsiLD.readEntity()` method - this will fill out the URL for the GET request and make the\nnecessary HTTP call in an asynchronous fashion:\n\n```javascript\nasync function displayStore(req, res) {\n const store = await ngsiLD.readEntity(\n req.params.storeId,\n { options: \"keyValues\" },\n ngsiLD.setHeaders(req.session.access_token, LinkHeader)\n );\n\n return res.render(\"store\", { title: store.name, store });\n}\n```\n\nThe function above also sends some standard HTTP Headers as part of the request - these are defined in the\n`setHeaders()` function.\n\nWithin an NGSI-LD-based system, the usual default HTTP headers would include a `Link` header to send the JSON-LD context\nand a `Content-Type` header to identify the request as `application/ld+json` (note that every NGSI-LD request is valid\nJSON_LD since NGSI-LD is a subset of JSON-LD). Other additional headers such as `X-Auth-Token` can be added to enable\nOAuth2 security.\n\n```javascript\nfunction setHeaders(accessToken, link, contentType) {\n const headers = {};\n if (accessToken) {\n headers[\"X-Auth-Token\"] = accessToken;\n }\n if (link) {\n headers.Link = link;\n }\n if (contentType) {\n headers[\"Content-Type\"] = contentType || \"application/ld+json\";\n }\n return headers;\n}\n```\n\nWithin the `lib/ngsi-ld.js` library file, the `BASE_PATH` defines the location of the Orion Context Broker, reading a\ndata entity is simply a wrapper around an asynchronous HTTP GET request passing the appropriate headers\n\n```javascript\nconst BASE_PATH = process.env.CONTEXT_BROKER || \"http://localhost:1026/ngsi-ld/v1\";\n\nfunction readEntity(entityId, opts, headers = {}) {\n return request({\n qs: opts,\n url: BASE_PATH + \"/entities/\" + entityId,\n method: \"GET\",\n headers,\n json: true\n });\n}\n```" + }, + "response": [] + } + ], + "description": "The code under discussion can be found within the `ngsi-ld/store` controller in the\n[Git Repository](https://github.com/FIWARE/tutorials.Step-by-Step/blob/master/context-provider/controllers/ngsi-ld/store.js)\n\nGoto `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001` to display and interact with the working\nSupermarket data application.\n\n### Initializing the library\n\nAs usual, the code for HTTP access can be split out from the business logic of the Supermarket application itself. The\nlower level calls have been placed into a library file, which simplifies the codebase. This needs to be included in the\nheader of the file as shown. 
Some constants are also required - for the Supermarket data, the `LinkHeader` is used to\ndefine the location of the data models' JSON-LD context as\n`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`.\n\n```javascript\nconst ngsiLD = require(\"../../lib/ngsi-ld\");\n\nconst LinkHeader =\n '<https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"';\n```", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "a0e52cd2-ce96-47ff-92d5-f3662b7fd7a4", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "0ad27612-e6c4-45b0-ac70-b32500fce319", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Aggregating and Traversing Linked Data", + "item": [ + { + "name": "Find Shelves within a known Store", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/ld+json", + "type": "text" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/?type=Building&options=keyValues&attrs=furniture", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Building:store001", + "" + ], + "query": [ + { + "key": "type", + "value": "Building" + }, + { + "key": "options", + "value": "keyValues" + }, + { + "key": "attrs", + "value": "furniture" + } + ] + }, + "description": "To access the `furniture` attribute of a known **Building** entity, a `keyValues` request is made using the `attrs`\nparameter.\n\n```javascript\nconst building = await ngsiLD.readEntity(\n req.params.storeId,\n {\n type: \"Building\",\n options: \"keyValues\",\n attrs: \"furniture\"\n },\n ngsiLD.setHeaders(req.session.access_token, LinkHeader)\n);\n```\n\nThe response is a JSON Object which includes a `furniture` attribute which can be manipulated further." 
The `id` is just a comma separated list taken from the request above.\n\n```javascript\nlet productsList = await ngsiLD.listEntities(\n {\n type: \"Shelf\",\n options: \"keyValues\",\n attrs: \"stocks,numberOfItems\",\n id: building.furniture.join(\",\")\n },\n ngsiLD.setHeaders(req.session.access_token, LinkHeader)\n);\n```\n\n`listEntities()` is another function within the `lib/ngsi-ld.js` library file\n\n```javascript\nfunction listEntities(opts, headers = {}) {\n return request({\n qs: opts,\n url: BASE_PATH + \"/entities\",\n method: \"GET\",\n headers,\n json: true\n });\n}\n```\n\nThe response is a JSON Array of **Shelf** entities which includes as `stocks` attribute which can be manipulated\nfurther. The code below extracts the ids for later use.\n\n```javascript\nconst stockedProducts = [];\n\nproductsList = _.groupBy(productsList, e => {\n return e.stocks;\n});\n_.forEach(productsList, (value, key) => {\n stockedProducts.push(key);\n});\n```" + }, + "response": [] + }, + { + "name": "Retrieve Products from Shelves", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/ld+json", + "type": "text" + }, + { + "key": "Accept", + "value": "application/json", + "type": "text", + "name": "Accept" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?type=Product&options=keyValues&attrs=name,price&id=urn:ngsi-ld:Product:001,urn:ngsi-ld:Product:003,urn:ngsi-ld:Product:004", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "type", + "value": "Product" + }, + { + "key": "options", + "value": "keyValues" + }, + { + "key": "attrs", + "value": "name,price" + }, + { + "key": "id", + "value": "urn:ngsi-ld:Product:001,urn:ngsi-ld:Product:003,urn:ngsi-ld:Product:004" + } + ] + }, + "description": "To retrieve a series of **Product** entities, the `ngsiLD.listEntities()` function is once again called and filtered\nusing the `id` parameter. The `id` is just a comma separated list taken from the request above.\n\n```javascript\nlet productsInStore = await ngsiLD.listEntities(\n {\n type: \"Product\",\n options: \"keyValues\",\n attrs: \"name,price\",\n id: stockedProducts.join(\",\")\n },\n headers\n);\n```\n\nThe response is a JSON Array of **Product** entities which are then displayed on screen." + }, + "response": [] + } + ], + "description": "To display information at the till, it is necessary to discover information about the products found within a Store.\nFrom the Data Entity diagram we can ascertain that:\n\n- **Building** entities hold related **Shelf** information within the `furniture` _Relationship_\n- **Shelf** entities hold related **Product** information within the `stocks` _Relationship_\n- Products hold `name` and `price` as _Property_ attributes of the **Product** entity itself.\n\nTherefore the code for the `displayTillInfo()` method will consist of the following steps.\n\n1. Make a request to the Context Broker to _find shelves within a known store_\n2. Reduce the result to a `id` parameter and make a second request to the Context Broker to _retrieve stocked products\n from shelves_\n3. 
Reduce the result to an `id` parameter and make a third request to the Context Broker to _retrieve product details for\n selected shelves_\n\nTo users familiar with database joins, it may seem strange being forced to make a series of requests like this; however,\nit is necessary due to scalability issues/concerns in a large distributed setup. Direct join requests are not possible\nwith NGSI-LD.", + "protocolProfileBehavior": {} + }, + { + "name": "Updating Linked Data", + "item": [ + { + "name": "Find a shelf stocking a product", + "request": { + "method": "GET", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/ld+json", + "type": "text" + }, + { + "key": "Accept", + "value": "application/json", + "type": "text", + "name": "Accept" + } + ], + "url": { + "raw": "http://{{ScorpioBroker}}/ngsi-ld/v1/entities/?type=Shelf&options=keyValues&q=numberOfItems>0;locatedIn==\"urn:ngsi-ld:Building:store001\";stocks==\"urn:ngsi-ld:Product:001\"", + "protocol": "http", + "host": [ + "{{ScorpioBroker}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "" + ], + "query": [ + { + "key": "type", + "value": "Shelf" + }, + { + "key": "options", + "value": "keyValues" + }, + { + "key": "q", + "value": "numberOfItems>0;locatedIn==\"urn:ngsi-ld:Building:store001\";stocks==\"urn:ngsi-ld:Product:001\"" + } + ] + }, + "description": "To retrieve a series of **Shelf** entities, the `ngsiLD.listEntities()` function is called. It is important to retrieve\nthe current context before amending it, so the `q` parameter is used to only retrieve a shelf from the correct store\ncontaining the correct product. 
This request is only possible because the **Shelf** data model has been designed to hold\n_relationships_ with both **Building** and **Product**.\n\n```javascript\nconst shelf = await ngsiLD.listEntities(\n {\n type: \"Shelf\",\n options: \"keyValues\",\n attrs: \"stocks,numberOfItems\",\n q: 'numberOfItems>0;locatedIn==\"' + req.body.storeId + '\";stocks==\"' + req.body.productId + '\"',\n limit: 1\n },\n headers\n);\n```" + }, + "response": [] + }, + { + "name": "Update the state of a shelf", + "request": { + "method": "PATCH", + "header": [ + { + "key": "Link", + "value": "<{{datamodels-context.jsonld}}>; rel=\"http://www.w3.org/ns/json-ld#context\"; type=\"application/ld+json\"", + "type": "text" + }, + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Accept", + "value": "application/json", + "type": "text", + "name": "Accept", + "disabled": true + } + ], + "body": { + "mode": "raw", + "raw": "{ \"numberOfItems\": { \"type\": \"Property\", \"value\": 10 } }" + }, + "url": { + "raw": "http://{{orion}}/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/attrs", + "protocol": "http", + "host": [ + "{{orion}}" + ], + "path": [ + "ngsi-ld", + "v1", + "entities", + "urn:ngsi-ld:Shelf:unit001", + "attrs" + ] + }, + "description": "To update an entity a PATCH request is made using the id of the **Shelf** returned in the previous request\n\n```javascript\nconst count = shelf[0].numberOfItems - 1;\nawait ngsiLD.updateAttribute(\n shelf[0].id,\n { numberOfItems: { type: \"Property\", value: count } },\n ngsiLD.setHeaders(req.session.access_token, LinkHeader)\n);\n```\n\nThe asynchronous PATCH request is found in the `updateAttribute()` function within the `lib/ngsi-ld.js` library file\n\n```javascript\nfunction updateAttribute(entityId, body, headers = {}) {\n return request({\n url: BASE_PATH + \"/entities/\" + entityId + \"/attrs\",\n method: \"PATCH\",\n body,\n headers,\n json: true\n });\n}\n```" + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "3cfa34c8-8c56-4741-beee-b59a0ec6ca65", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "02069d51-35e3-4f61-b1b5-982b2c942b80", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "variable": [ + { + "id": "59df592b-d61e-4c0d-853f-0c0b626ff558", + "key": "ScorpioBroker", + "value": "localhost:1026" + }, + { + "id": "9f7f5df5-a853-4947-a76a-53ebedff7607", + "key": "datamodels-context.jsonld", + "value": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld" + }, + { + "id": "da06248c-ded2-4e04-b754-aee588cdc59c", + "key": "device-edge", + "value": "localhost:1027" + }, + { + "id": "b481f672-d2f6-4b74-ad65-1d7558bfc2f4", + "key": "context-provider", + "value": "localhost:3000" + } + ], + "protocolProfileBehavior": {} +} \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/doctrees/API_walkthrough.doctree b/scorpio-broker/docs/en/source/_build/doctrees/API_walkthrough.doctree new file mode 100644 index 0000000000000000000000000000000000000000..87eaa6ac7b18429e8c75b0a11567ac7687d27594 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/API_walkthrough.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/callFlow.doctree b/scorpio-broker/docs/en/source/_build/doctrees/callFlow.doctree new file mode 100644 index 
0000000000000000000000000000000000000000..7b9c10e38d69514490df39a0e4c49f31c3651e22 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/callFlow.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/contributionGuideline.doctree b/scorpio-broker/docs/en/source/_build/doctrees/contributionGuideline.doctree new file mode 100644 index 0000000000000000000000000000000000000000..3c56589739100df1126af52a56d41224ab3f0b70 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/contributionGuideline.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/docker.doctree b/scorpio-broker/docs/en/source/_build/doctrees/docker.doctree new file mode 100644 index 0000000000000000000000000000000000000000..d591d12e689a52a970edc61d0efbff3969239eac Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/docker.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/environment.pickle b/scorpio-broker/docs/en/source/_build/doctrees/environment.pickle new file mode 100644 index 0000000000000000000000000000000000000000..e7a400f859eb7f9a1eccb085087fbd208c558d4c Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/environment.pickle differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/errorHandling.doctree b/scorpio-broker/docs/en/source/_build/doctrees/errorHandling.doctree new file mode 100644 index 0000000000000000000000000000000000000000..0b12ef23d37a84db4d27c6fa95b57298026dd146 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/errorHandling.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/gitGuideline.doctree b/scorpio-broker/docs/en/source/_build/doctrees/gitGuideline.doctree new file mode 100644 index 0000000000000000000000000000000000000000..09fad2b27912c9f137b47c876d595826aa602a29 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/gitGuideline.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/hardwareRequirement.doctree b/scorpio-broker/docs/en/source/_build/doctrees/hardwareRequirement.doctree new file mode 100644 index 0000000000000000000000000000000000000000..7dd4e9d7ee3c14829cfe763168dafd6d4cd557c2 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/hardwareRequirement.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/index.doctree b/scorpio-broker/docs/en/source/_build/doctrees/index.doctree new file mode 100644 index 0000000000000000000000000000000000000000..2ab9ea2172b26ec44a38d18c624bb0afbf6cd65d Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/index.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/introduction.doctree b/scorpio-broker/docs/en/source/_build/doctrees/introduction.doctree new file mode 100644 index 0000000000000000000000000000000000000000..85805bc69226a1e172b3c124655f89608fc462bd Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/introduction.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/onepageTutorial.doctree b/scorpio-broker/docs/en/source/_build/doctrees/onepageTutorial.doctree new file mode 100644 index 0000000000000000000000000000000000000000..aa4ce21407662485a8c99a8debd1da4b9574426c Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/onepageTutorial.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/security.doctree 
b/scorpio-broker/docs/en/source/_build/doctrees/security.doctree new file mode 100644 index 0000000000000000000000000000000000000000..4dfabfa1c2b0acdbff672a766fa84eff8abffc63 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/security.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/systemOverview.doctree b/scorpio-broker/docs/en/source/_build/doctrees/systemOverview.doctree new file mode 100644 index 0000000000000000000000000000000000000000..a83dd50aa43dcbeac9d424e256d4ac4eaed47cb3 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/systemOverview.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/doctrees/troubleshooting.doctree b/scorpio-broker/docs/en/source/_build/doctrees/troubleshooting.doctree new file mode 100644 index 0000000000000000000000000000000000000000..5eaa8e1353f16eaefae91dd67a8f9872b85dc7ba Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/doctrees/troubleshooting.doctree differ diff --git a/scorpio-broker/docs/en/source/_build/html/.buildinfo b/scorpio-broker/docs/en/source/_build/html/.buildinfo new file mode 100644 index 0000000000000000000000000000000000000000..024b37ee341c8b59718a8d0094821cb26064d14d --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/.buildinfo @@ -0,0 +1,4 @@ +# Sphinx build info version 1 +# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. +config: f21b8b60359bcabeb0ff4fcc173507ed +tags: 645f666f9bcd5a90fca523b33c5a78b7 diff --git a/scorpio-broker/docs/en/source/_build/html/API_walkthrough.html b/scorpio-broker/docs/en/source/_build/html/API_walkthrough.html new file mode 100644 index 0000000000000000000000000000000000000000..c637e9fd7e6e4370b827cd19d11823cbc1cbd49f --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/API_walkthrough.html @@ -0,0 +1,320 @@ + + + + + + + + 10. API Walkthrough — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+ 10. API Walkthrough
+ 
+ API List
+ 
+ | S.No. | External Interface | Module name | Description | Http verb |
+ |-------|--------------------|-------------|-------------|-----------|
+ | 1. | http://ip:port/ngsi-ld/v1/entities | Entity | To create an entity. | POST |
+ | 2. | http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs | Entity | To add attributes to an entity. | POST |
+ | 3. | http://ip:port/ngsi-ld/v1/entities | Entity | To retrieve the list of existing entities. | GET |
+ | 4. | http://ip:port/ngsi-ld/v1/entities/{entityId} | Entity | To retrieve an entity by its ID. | GET |
+ | 5. | http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs | Entity | To update the attributes of an entity. | PATCH |
+ | 6. | http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs/{attrId} | Entity | To partially update an attribute. | PATCH |
+ | 7. | http://ip:port/ngsi-ld/v1/entities/{entityId} | Entity | To delete an entity by its ID. | DELETE |
+ | 8. | http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs/{attrId} | Entity | To delete an attribute. | DELETE |
+ | 9. | http://ip:port/ngsi-ld/v1/subscriptions | Subscription | To add a subscription in the Scorpio Broker. | POST |
+ | 10. | http://ip:port/ngsi-ld/v1/subscriptions | Subscription | To retrieve the subscription list. | GET |
+ | 11. | http://ip:port/ngsi-ld/v1/subscriptions/{subscriptionID} | Subscription | To retrieve subscription details of a particular subscription. | GET |
+ | 12. | http://ip:port/ngsi-ld/v1/subscriptions/{subscriptionID} | Subscription | To update an already existing subscription. | PATCH |
+ | 13. | http://ip:port/ngsi-ld/v1/subscriptions/{subscriptionID} | Subscription | To delete a subscription. | DELETE |
+ | 14. | http://ip:port/ngsi-ld/v1/csources | Context Source Registration | To register a csource in the Scorpio Broker. | POST |
+ | 15. | http://ip:port/ngsi-ld/v1/csources | Context Source Registration | To retrieve the list of registered Context Sources. | GET |
+ | 16. | http://ip:port/ngsi-ld/v1/csources/{registrationId} | Context Source Registration | To retrieve details of a particular csource registration. | GET |
+ | 17. | http://ip:port/ngsi-ld/v1/csources/{registrationId} | Context Source Registration | To update an already existing csource registration. | PATCH |
+ | 18. | http://ip:port/ngsi-ld/v1/csources/{registrationId} | Context Source Registration | To delete a registration. | DELETE |
+ | 19. | http://ip:port/ngsi-ld/v1/csourceSubscriptions | Context Source Registration Subscription | To add a subscription for a csource in the Scorpio Broker. | POST |
+ | 20. | http://ip:port/ngsi-ld/v1/csourceSubscriptions | Context Source Registration Subscription | To retrieve the subscription list of context registrations. | GET |
+ | 21. | http://ip:port/ngsi-ld/v1/csourceSubscriptions/{subscriptionId} | Context Source Registration Subscription | To retrieve subscription details of a csource registration by id. | GET |
+ | 22. | http://ip:port/ngsi-ld/v1/csourceSubscriptions/{subscriptionId} | Context Source Registration Subscription | To update the subscription of a csource registration. | PATCH |
+ | 23. | http://ip:port/ngsi-ld/v1/csourceSubscriptions/{subscriptionId} | Context Source Registration Subscription | To delete the subscription of a csource registration. | DELETE |
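To make the table above concrete, the sketch below exercises two of the listed endpoints (entity creation and subscription creation) from Node.js 18+. It is illustrative only: the broker host, the URNs and the notification endpoint are example values, and the payload shapes follow the NGSI-LD specification rather than anything specific to this page.

```javascript
// Illustrative sketch: create a Shelf entity, then subscribe to changes of its numberOfItems.
const broker = "http://localhost:9090"; // example host - substitute your Scorpio endpoint

(async () => {
    // POST /ngsi-ld/v1/entities - row 1 of the API list.
    await fetch(`${broker}/ngsi-ld/v1/entities`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
            id: "urn:ngsi-ld:Shelf:unit001",
            type: "Shelf",
            numberOfItems: { type: "Property", value: 50 }
        })
    });

    // POST /ngsi-ld/v1/subscriptions - row 9 of the API list.
    await fetch(`${broker}/ngsi-ld/v1/subscriptions`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
            id: "urn:ngsi-ld:Subscription:001",
            type: "Subscription",
            entities: [{ type: "Shelf" }],
            watchedAttributes: ["numberOfItems"],
            notification: {
                endpoint: { uri: "http://my-listener:8080/notify", accept: "application/json" }
            }
        })
    });
})();
```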
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_images/architecture.png b/scorpio-broker/docs/en/source/_build/html/_images/architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..bb4b35451896b991bf5e5df3be4124593701c614 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/architecture.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/dbconfig-1.png b/scorpio-broker/docs/en/source/_build/html/_images/dbconfig-1.png new file mode 100644 index 0000000000000000000000000000000000000000..fd2d0cd9fef30e8a27a84b580b21b2af2ef7ecc4 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/dbconfig-1.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/dbconfig-2.png b/scorpio-broker/docs/en/source/_build/html/_images/dbconfig-2.png new file mode 100644 index 0000000000000000000000000000000000000000..e8d2adcea876cf6f446d5bd65ed477921271f54b Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/dbconfig-2.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/deploymentarchitecture.png b/scorpio-broker/docs/en/source/_build/html/_images/deploymentarchitecture.png new file mode 100644 index 0000000000000000000000000000000000000000..a4d134bce267ab342e9d7cdafc480c626c30c6db Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/deploymentarchitecture.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/flow-1.png b/scorpio-broker/docs/en/source/_build/html/_images/flow-1.png new file mode 100644 index 0000000000000000000000000000000000000000..ae224c46decfd7f08ba2aaed12373918e09dfaea Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/flow-1.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/flow-2.png b/scorpio-broker/docs/en/source/_build/html/_images/flow-2.png new file mode 100644 index 0000000000000000000000000000000000000000..c2c2045803a71c2ef798587d9bd072ce379df168 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/flow-2.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/flow-3.png b/scorpio-broker/docs/en/source/_build/html/_images/flow-3.png new file mode 100644 index 0000000000000000000000000000000000000000..d3de357e17884f707236996b2d1129388cf090c0 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/flow-3.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/flow-4.png b/scorpio-broker/docs/en/source/_build/html/_images/flow-4.png new file mode 100644 index 0000000000000000000000000000000000000000..adb6d1650d835dd81ea7704b091067991668cebb Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/flow-4.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/flow-5.png b/scorpio-broker/docs/en/source/_build/html/_images/flow-5.png new file mode 100644 index 0000000000000000000000000000000000000000..b6052b8529015e069d27f4b8806783a016f522f0 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/flow-5.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/flow-6.png b/scorpio-broker/docs/en/source/_build/html/_images/flow-6.png new file mode 100644 index 0000000000000000000000000000000000000000..d27d4af0c76e381a49deaf588b54d14a84386135 Binary files /dev/null and 
b/scorpio-broker/docs/en/source/_build/html/_images/flow-6.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/gitGuideline.jpg b/scorpio-broker/docs/en/source/_build/html/_images/gitGuideline.jpg new file mode 100644 index 0000000000000000000000000000000000000000..bea1d4aaf0c95d2f070315fddf56ea4eec54df7a Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/gitGuideline.jpg differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/jre-1.png b/scorpio-broker/docs/en/source/_build/html/_images/jre-1.png new file mode 100644 index 0000000000000000000000000000000000000000..eae1efee4b4f629b9ad83daa9e7c8b20fe417609 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/jre-1.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/jre-2.png b/scorpio-broker/docs/en/source/_build/html/_images/jre-2.png new file mode 100644 index 0000000000000000000000000000000000000000..24894b44da40941dc77937c591be77f3f6f83a7d Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/jre-2.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/jre-3.png b/scorpio-broker/docs/en/source/_build/html/_images/jre-3.png new file mode 100644 index 0000000000000000000000000000000000000000..f4d8b08b05636fcaec9d376712808e0ab83cf5a5 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/jre-3.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/jre-4.png b/scorpio-broker/docs/en/source/_build/html/_images/jre-4.png new file mode 100644 index 0000000000000000000000000000000000000000..f63a21afd5d95f0f03d094a4289682c5f2ed3a4b Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/jre-4.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/security.png b/scorpio-broker/docs/en/source/_build/html/_images/security.png new file mode 100644 index 0000000000000000000000000000000000000000..7f5d3d1197312dc1a1342a1478c20f76cc9f9c56 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/security.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_images/zookee.png b/scorpio-broker/docs/en/source/_build/html/_images/zookee.png new file mode 100644 index 0000000000000000000000000000000000000000..b2e43178d2dc0c42f35187c445eeb2aed9505557 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_images/zookee.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/API_walkthrough.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/API_walkthrough.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..009c53feaf807d648214272f9af14bccf76eea01 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/API_walkthrough.rst.txt @@ -0,0 +1,153 @@ +******************************* +API Walkthrough +******************************* + +.. list-table:: **API List** + :widths: 5 30 15 35 15 + :header-rows: 1 + + * - S.No. + - External Interface + - Module name + - Description + - Http verb + + + * - 1. + - http://ip:port/ngsi-ld/v1/entities + - Entity + - To create entity. + - POST + + * - 2. + - http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs + - Entity + - To add attributes to an entity. + - POST + + * - 3. + - http://ip:port/ngsi-ld/v1/entities + - Entity . + - To retrieve the list of existing entity + - GET + + * - 4. 
+ - http://ip:port/ngsi-ld/v1/entities/{entityId} + - Entity + - To retrieve an entity by its ID + - GET + + * - 5. + - http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs + - Entity + - To update the attributes of an entity. + - PATCH + + * - 6. + - http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs /{attrId} + - Entity + - To partially update an attribute + - PATCH + + * - 7. + - http://ip:port/ngsi-ld/v1/entities/{entityId} + - Entity + - To delete an entity by its ID + - DELETE + + * - 8. + - http://ip:port/ngsi-ld/v1/entities/{entityId}/attrs /{attrId} + - Entity + - To delete an attribute. + - DELETE + + + * - 9. + - http://ip:port/ngsi-ld/v1/subscriptions + - Subscription + - To add subscription in the Scorpio Broker + - POST + + * - 10. + - http://ip:port/ngsi-ld/v1/subscriptions + - Subscription + - To retrieve subscription list + - GET + + * - 11. + - http://ip:port/ngsi-ld/v1/subscriptions/{subscriptionID} + - Subscription + - To retrieve subscription details of a particular subscription + - GET + + * - 12. + - http://ip:port/ngsi-ld/v1/subscriptions/{subscriptionID} + - Subscription + - To update the already existing subscription. + - PATCH + + * - 13. + - http://ip:port/ngsi-ld/v1/subscriptions/{subscriptionID} + - Subscription + - To delete a subscription + - DELETE + + * - 14. + - http://ip:port/ngsi-ld/v1/csources + - Context Source Registration + - To add register a csource in the Scorpio Broker + - POST + + * - 15. + - http://ip:port/ngsi-ld/v1/csources + - Context Source Registration + - To retrieve list of Context Source which are registered + - GET + + * - 16. + - http://ip:port/ngsi-ld/v1/csources/{registrationId} + - Context Source Registration + - To retrieve subscription details of a particular csource registration + - GET + + * - 17. + - http://ip:port/ngsi-ld/v1/csources/{registrationId} + - Context Source Registration + - To update the already existing csource registration. + - PATCH + + * - 18. + - http://ip:port/ngsi-ld/v1/csources/{registrationId} + - Context Source Registration + - To delete a resgistration. + - DELETE + + * - 19. + - http://ip:port/ngsi-ld/v1/csourceSubscriptions + - Context Source Registration Subscription + - To add subscription for csource in the Scorpio Broker(device) + - POST + + * - 20. + - http://ip:port/ngsi-ld/v1/csourceSubscriptions + - Context Source Registration Subscription + - To retrieve the subscription list of context registration. + - GET + + * - 21. + - http://ip:port/ngsi-ld/v1/csourceSubscriptions/{subscriptionId} + - Context Source Registration Subscription + - To retrieve subscription details of csource registration by id + - GET + + * - 22. + - http://ip:port/ngsi-ld/v1/csourceSubscriptions/{subscriptionId} + - Context Source Registration Subscription + - To update the subscription of csource registration. + - PATCH + + * - 23. + - http://ip:port/ngsi-ld/v1/csourceSubscriptions/{subscriptionId} + - Context Source Registration Subscription + - To delete the subscription of csource registration. + - DELETE diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/callFlow.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/callFlow.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..41f3b51801fc0e0ce8604be7941899c5444987b1 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/callFlow.rst.txt @@ -0,0 +1,230 @@ +******************************* +Entity Create/Update/Append +******************************* + +.. 
figure:: figures/flow-1.png
+
+The figure shows the operational flow of entity create/update/append in the Scorpio Broker system. The marked steps are interpreted as follows:
+
+1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to create/update/append an entity in the form of an HTTP POST request.
+
+2. The request enters the service API gateway.
+
+ 2.1. The service API gateway discovers the actual serving micro-service endpoints (to which the incoming requests need to be forwarded) from the discovery & registry service.
+
+ 2.2. The service API gateway forwards the HTTP request to the Entity Manager micro-service.
+
+3. The Entity Manager internally calls the LDContext resolver service to resolve the payload with the given context sent along with the POST request. Once the payload is resolved with the context, it fetches the previously stored data/entities from the topic "Entities" and validates the requested entity against the existing stored entities based on the entity ID.
+
+- If the entity is already present (with all the attributes and values that are requested to be modified), an "already exists" error message is returned and no further step is executed.
+
+- Else it moves on for further processing.
+
+4. The Entity Manager (EM) publishes/stores the requested entity (E1) and sends the response to the requester for the creation operation, as follows:
+
+ 4.1. EM publishes E1 to Kafka under the topic "Entities".
+
+ 4.2. EM publishes E1 to Kafka under the topic "Entity_Create/Update/Append" as well.
+
+ 4.3. Upon a successful publish operation, EM sends the response back.
+
+**Note**: The "Entities" topic saves all changes made to an entity over time by any of the create/update/append operations. The "Entity_Create/Update/Append" topic (specific to the CREATE operation) stores only the data changes of the entity create operation. Having a different topic per operation avoids ambiguity among different consumers with different requirements, e.g. the subscription manager may need to subscribe to the whole entity, a set of specific attributes, or value changes of certain attributes. Managing all these requirements would be hard without a separate topic per operation, whereas with per-operation topics the delta change in data for a given entity at any point in time can be provided directly. Putting the data of all operations into a single topic cannot offer the required decoupling, simplification, and flexibility to subscribe/manage at the operation, data, or delta-data level.
+That is why creating separate topics per operation, plus one common topic recording all changes of all operations to a given entity (required to validate the whole entity's changes across operations over time), is the favored design choice.
+The context for the given payload is stored by the LDContext resolver service in the Kafka topic named AtContext.
+
+5. When a message gets published to a Kafka topic, the consumers subscribed to (listening on) that topic get notified. In this case, the consumers of the "Entity Create/Update/Append" topic, upon receiving the notification, do the following:
+
+ 5.1.
Subscription Manager when getting a notification for the related entity it will check for the notification validation for the current entity and checks if the notification needs to be sent accordingly. + + 5.2. Storage Manager, upon notification from Entities & CR Topics, will trigger the further operations to store/modify the entity related changes in the DB tables. + +6. Now entity manager also prepares for registration of the entity data model in the Context Registry. Following are the further functions it performs to achieve the same: + + 6.1. So it prepares the csource registration payload (as per NGSI_LD spec section C.3) from the entity payload and fills the necessary field (like id, endpoint as broker IP, location, etc.). Afterword entity manager writes this created csource payload in the CR Topic. + + 6.2.CR Manager listens to this CR topic and then able to know that some entity has registered. + + 6.3.CR manager writes the updates, if any are there, into the Csource Topic.   + +******************************* +Entity Subscription +******************************* + +.. figure:: figures/flow-2.png + +The Figure is showing the operational flow of entity subscription in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to subscribe for an entity (or attribute) in the form of the HTTP POST request. + +2. The request enters in service API gateway. + + 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service. + + 2.2. The service API gateway forwards the HTTP request to the Subscription Manager micro-service. +    +3. The Subscription Manager internally calls an LDContext resolver service to resolve the payload with the given context sent along with the POST request. The subscription manager then fetches the previously stored data/entities from the Topic “Subscription†and validates the requested entity against the existing stored values based on EntityID. + +- If the data for the current request is already present, an error message will be responded for the same and no further step will be executed. + +- Else it will move for further processing. + +4. The Subscription Manager (SM) will publish/store and send the response to the requestor for the requested operation given as follows: + + 4.1.SM publish the subscription S(E1) in the Kafka under Topic “Subscription†+ + 4.2.SM will start the notification functionality and will start/keep listening for related subscription on. + + 4.2.1. Entity related topics “Create/Update/Append†+ + 4.2.2.Context source related topic i.e. “CSource†topic for any future registration of context sources. Doing this it avoids the need to query CR explicitly for csources for already subscribed items/entities. + + 4.2.3.CRQueryResult Topic for gathering results of the raised specific queries, if any are there. + + 4.2.4. Upon successful subscription condition of subscription request, SM will notify the subscribed entity to the given endpoint back. And also do the remote subscriptions to the context sources provided by the context registry. + + 4.3. Upon successful pub operation, SM will send the response back +    +5.SM optionally may raise the query to CR by posting in the CRQuery Topic for each of the subscription requests received (only once per each subscription request). 
When a message gets published to the CRQuery topic, the consumer (CR), which has subscribed to or is listening on this topic, gets notified. CR then does the following:
+
+ 5.1. CR receives the notification and determines the list of context sources for which this subscription may be valid by pulling data from the CR topic and/or the CSourceSub topic.
+
+ 5.2. CR publishes the list of context sources into the CRQueryResult topic, on which the SM has already started listening, and steps 4.2.3 and 4.2.4 are repeated.
+
+**Note**: The CSource topic contains the list of context sources registered directly through the csource registration interface. The CR topic contains the map of the entity data model (maintained as an entity ID) created from entity creation requests (through the IoT broker interface) and/or the provider/data source of that entity model.
+Limitation: In the first release of Scorpio Broker, a REST-based csource query is not supported; instead, the csource query is based on the internal messaging queue mechanism. In the future, both the message-queue-based and the REST-based csource query will be supported.
+
+
+
+*******************************
+Query
+*******************************
+
+.. figure:: figures/flow-3.png
+
+The figure shows the operational flow of a query in the Scorpio Broker system. The marked steps are interpreted as follows:
+
+1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to query for entities/an entity/an attribute in the form of an HTTP GET request.
+
+2. The request enters the service API gateway.
+
+ 2.1. The service API gateway discovers the actual serving micro-service endpoints (to which the incoming requests need to be forwarded) from the discovery & registry service.
+
+ 2.2. The service API gateway forwards the HTTP request to the Query Manager micro-service.
+
+3. The Query Manager now fetches the previously stored data/entities from the topic "Entities".
+
+- If the query is for all entities, or for specific entities by id and/or attribute, it is served directly by the Query Manager based on the Kafka Entities topic data, without involving the Storage Manager. In short, simpler queries (e.g. non-geo queries or queries without regular expressions) associated with an entity or entities can be served directly. In this case, the response is sent back and processing jumps to step 7.2.
+
+- For complex queries, the Query Manager takes help from the Storage Manager as described in the following steps.
+
+4. The Query Manager (in case of complex queries) publishes the query (embedding an identifier used in the message and other metadata) into the Query topic, which is listened to by the Storage Manager.
+
+5. The Storage Manager gets the notification for the requested query, processes the query over the DB data, and builds the query response.
+
+6. The Storage Manager publishes the query response into the QueryResult topic, which is listened to by the Query Manager.
+
+7. The QM receives the notification from the QueryResult topic.
+
+ 7.1. It sends the HTTP response back to the API gateway.
+
+ 7.2. The API gateway sends the response back to the end-user/requestor.
+
+
+
+
+
+*******************************
+Context Source Registration
+*******************************
+
+.. figure:: figures/flow-4.png
+
+The figure shows the operational flow of context source registration in the Scorpio Broker system. The marked steps are interpreted as follows:
+
+1.
An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to register a csource, in the form of an HTTP POST request.
+
+2. The request enters the service API gateway.
+
+ a. The service API gateway discovers the actual serving micro-service endpoints (to which the incoming requests need to be forwarded) from the discovery & registry service.
+
+ b. The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.
+
+3. The CR Manager now fetches the previously stored data/entities from the topic "CSource".
+
+ a. If the entry for the requested csource is already present, it exits the processing and informs the requester accordingly. If it is not present, then it continues with further processing.
+
+ b. The CR Manager performs some basic validation to check whether this is a valid request with a valid payload.
+
+ c. The CR Manager then writes this payload into the CSource topic.
+
+4. The Storage Manager keeps listening on the CSource topic and, for any new entry written to it, performs the corresponding operation in the database.
+
+5. The CR Manager prepares the response for the csource request and
+
+ 5.1 sends the HTTP response back to the API gateway.
+
+ 5.2 The API gateway sends the response back to the end-user/requester.
+
+**Note**: For a Context Source Update request only the payload changes; in step 3, upon validation of the existing entity, it does not exit but instead updates the retrieved entity and writes it back into Kafka. The rest of the flow remains mostly the same.
+
+
+
+
+*******************************
+Context Source Subscription
+*******************************
+
+.. figure:: figures/flow-5.png
+
+The figure "Scorpio Broker Context Source Subscription Flow" shows the operational flow of context source subscriptions in the Scorpio Broker system. The marked steps are interpreted as follows (an illustrative request sketch follows this flow):
+
+1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to subscribe to csource updates, in the form of an HTTP POST request.
+
+2. The request enters the service API gateway.
+
+ a. The service API gateway discovers the actual serving micro-service endpoints (to which the incoming requests need to be forwarded) from the discovery & registry service.
+
+ b. The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.
+
+3. The CR Manager now fetches the previously stored data/entities from the topic "CSourceSub".
+
+ a. The CR Manager performs some basic validation to check whether this is a valid request with a valid payload.
+
+ b. If the entry for the requested csource subscription is already present, it exits the processing and informs the requester accordingly. If it is not present, then it continues with further processing.
+
+ c. The CR Manager then writes this payload into the CSourceSub topic.
+
+ d. In parallel, it also starts an independent thread to listen on the CSource topic for the requested subscription; when the subscription condition is met, a notification is sent to the endpoint registered in the subscription payload.
+
+4. The Storage Manager keeps listening on the CSourceSub topic and, for any new/updated entry written to it, performs the corresponding operation in the database.
+
+5. The CR Manager prepares the response for the csource subscription request and
+
+ 5.1 sends the HTTP response back to the API gateway.
+
+ 5.2 The API gateway sends the response back to the end-user/requester.
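The request in step 1 is an ordinary NGSI-LD subscription on context source registrations. As a rough sketch (host, port, ids, entity type, and notification endpoint below are placeholder values, not part of the broker), it could look like this:

.. code-block:: bash

   # Illustrative only: subscribe to context source registrations for a given entity type
   curl -X POST 'http://ip:port/ngsi-ld/v1/csourceSubscriptions' \
     -H 'Content-Type: application/ld+json' \
     -d '{
           "id": "urn:ngsi-ld:Subscription:csub001",
           "type": "Subscription",
           "entities": [{"type": "Vehicle"}],
           "notification": {"endpoint": {"uri": "http://my-app:8080/notify"}},
           "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
         }'

It is this payload that the CR Manager validates in step 3.a and writes to the CSourceSub topic in step 3.c.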
+ + + + +******************************* +History +******************************* + +.. figure:: figures/flow-6.png + +The Figure is showing the operational flow of entity subscription in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to the history manager in the form of an HTTP POST request. + +2. The request enters in service API gateway. + + a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service. + + b.The service API gateway forwards the HTTP request to the History Manager micro-service.   + +3. The history manager now executes the EVA algorithm approach on the received payload and push payload attributes to Kafka topic “TEMPORALENTITYâ€. + +**Note**: History Manager must walk through each attribute at the root level of the object (except @id and @type). Inside each attribute, it must walk through each instance (array element). Then, it sends the current object to the Kafka topic TEMPORALENTITY. + +4. The history manager will keep listening to the “TEMPORALENTITY†topic and for any new entry and performs the relative operation in the database. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/contributionGuideline.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/contributionGuideline.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..4616f01d17300b0996d2f37cada83f619068a5ff --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/contributionGuideline.rst.txt @@ -0,0 +1,53 @@ +******************************** +Branch Management Guidelines +******************************** + +.. figure:: figures/gitGuideline.jpg + +The community can have two main branches with an infinite lifetime: + +1. **Master branch**: This is a highly stable branch that is always production-ready and contains the last release version of source code in production. +2. **Development branch**: Derived from the master branch, the development branch serves as a branch for integrating different features planned for an upcoming release. This branch may or may not be as stable as the master branch. It is where developers collaborate and merge feature branches. All of the changes should be merged back into the master somehow and then tagged with a release number. + +Apart from those two primary branches, there are other branches in the workflow: + +- **Feature Branch**: Forked from the development branch for feature development i.e. enhancement or documentation. Merged back to the development branch after feature development or enhancement implementation. + +- **Bug Branch**: Ramify from the development branch. Merged back to the development branch after bug fixing. + +- **Hotfix branch**: Hotfix branches are created from the master branch. It is the current production release running live and causing troubles due to a severe bug. But changes in development are yet unstable. We may then branch off a hotfix branch and start fixing the problem. It should be the rarest occasion, in case only critical bugs. + +**Note**: Only NLE and NECTI members have the privilege to create and merge the Hotfix branch. + + +.. list-table:: **Branch naming convention** + :widths: 20 50 30 + :header-rows: 1 + + * - Branch + - Branches naming guideline + - Remarks + + + * - Feature branches + - Must branch from: *development*. 
Must merge back into: *development*. Branch naming convention: *feature-feature_id* + - *feature_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** + + + * - Bug Branches + - Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *bug-bug_id* + - *bug_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** + + + * - Hotfix Branches + - Must branch from: *master branch*. Must merge back into: *master branch*. Branch naming convention: *hotfix-bug number*. + - *Bug number* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** . + + + +Permissions to the branches: +******************************* + +- **Master** - We tend to very strict that only NLE members and privileged members of NECTI can merge on the Master branch and accept the pull requests. Pull requests to master can be raised by only NECTI OR NLE members. + +- **Development** - Any community member can raise the pull request to the development branch but it should be reviewed by NLE or NECTI members. Development branches commits will be moved to the master branch only when all the test cases written under NGSI-LD test suites, will run successfully. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/docker.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/docker.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..f573d424b086f06ccddfd5d19276182b638db0ef --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/docker.rst.txt @@ -0,0 +1,98 @@ +**************************** +Getting a docker container +**************************** + +The current maven build supports two types of docker container generations from the build using maven profiles to trigger it. + +The first profile is called 'docker' and can be called like this +  +.. code-block:: bash + +    mvn clean package -DskipTests -Pdocker + +this will generate individual docker containers for each microservice. The corresponding docker-compose file is `docker-compose-dist.yml` + + +The second profile is called 'docker-aaio' (for almost all in one). This will generate one single docker container for all components of the broker except the Kafka message bus and the Postgres database. + +To get the aaio version run the maven build like this + +.. code-block:: bash + +    mvn clean package -DskipTests -Pdocker-aaio +  +The corresponding docker-compose file is `docker-compose-aaio.yml` + +General remark for the Kafka docker image and docker-compose +============================================================ + +The Kafka docker container requires you to provide the environment variable `KAFKA_ADVERTISED_HOST_NAME`. This has to be changed in the docker-compose files to match your docker host IP. You can use `127.0.0.1` however this will disallow you to run Kafka in a cluster mode. + +For further details please refer to https://hub.docker.com/r/wurstmeister/kafka + +Running docker build outside of Maven +===================================== + +If you want to have the build of the jars separated from the docker build you need to provide certain VARS to docker. 
+The following list shows all the vars and their intended value if you run docker build from the root dir + +  + - BUILD_DIR_ACS = Core/AtContextServer +  + - BUILD_DIR_SCS = SpringCloudModules/config-server +  + - BUILD_DIR_SES = SpringCloudModules/eureka +  + - BUILD_DIR_SGW = SpringCloudModules/gateway +  + - BUILD_DIR_HMG = History/HistoryManager +  + - BUILD_DIR_QMG = Core/QueryManager +  + - BUILD_DIR_RMG = Registry/RegistryManager +  + - BUILD_DIR_EMG = Core/EntityManager +  + - BUILD_DIR_STRMG = Storage/StorageManager +  + - BUILD_DIR_SUBMG = Core/SubscriptionManager + + - JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar +  + - JAR_FILE_BUILD_SCS = config-server-${project.version}.jar +  + - JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar +  + - JAR_FILE_BUILD_SGW = gateway-${project.version}.jar +  + - JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar +  + - JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar +  + - JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar +  + - JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar +  + - JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar +  + - JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar + + - JAR_FILE_RUN_ACS = AtContextServer.jar +  + - JAR_FILE_RUN_SCS = config-server.jar +  + - JAR_FILE_RUN_SES = eureka-server.jar +  + - JAR_FILE_RUN_SGW = gateway.jar +  + - JAR_FILE_RUN_HMG = HistoryManager.jar +  + - JAR_FILE_RUN_QMG = QueryManager.jar +  + - JAR_FILE_RUN_RMG = RegistryManager.jar +  + - JAR_FILE_RUN_EMG = EntityManager.jar +  + - JAR_FILE_RUN_STRMG = StorageManager.jar +  + - JAR_FILE_RUN_SUBMG = SubscriptionManager.jar diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/errorHandling.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/errorHandling.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..37c02e6f8ca9d5a5908e4168a6a4c348fa2f5eb7 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/errorHandling.rst.txt @@ -0,0 +1,88 @@ +*********************** +Error Handler +*********************** + +This section will provide info on the error handling mechanism for the Scorpio Broker system. + +Listed below are the events of the system + +.. list-table:: **Error Handling** + :widths: 5 15 35 15 10 20 + :header-rows: 1 + + * - S.No. + - Operation/Event + - Scenario Description + - Responsible  Module + - Error Code/ Response + - Action +     + * - 1. + - InvalidRequest + - The request associated to the operation is syntactically invalid or includes wrong content + - REST Controller + - HTTP 400 + - Log the error & notify the requestor + + * - 2. + - BadRequestData + - The request includes input data which does not meet the requirements of the operation + - REST Controller + - HTTP 400 + - Log the error & notify the requestor + + * - 3. + - AlreadyExists + - The referred element already exists + - REST Controller + - HTTP 409 + - Log the error & notify the requestor + + * - 4. + - OperationNotSupported + - The operation is not supported + - REST Controller + - HTTP 422 + - Log the error & notify the requestor + + * - 5. + - ResourceNotFound + - The referred resource has not been found + - REST Controller + - HTTP 404 + - Log the error & notify the requestor + + * - 6. + - InternalError + - There has been an error during the operation execution + - REST Controller + - HTTP 500 + - Log the error & notify the requestor + + * - 7. 
+ - Method Not Allowed + - There has been an error when a client invokes a wrong HTTP verb over a resource + - REST Controller + - HTTP 405 + - Log the error & notify the requestor + + + + +Please note the errors can also be categorized into following categories for different exceptions that can occur internally to the implementation logic as well: + + 1. Low criticality is those which involve the errors that should be handled by the software logic, and are due to some configuration issues and should not require anything like reset, a reboot of the system. + + 2. Medium Criticality is those which will be tried for the software logic handling but it may need system reset, chip reset and may interrupt system significantly. + + 3. High Criticality is the hardware-based error that should not occur and if occur may need system reset. + +Fail-safe mechanisms for the different category of errors: + + a. For the Low criticality of the errors, logging will be performed, the retry will be performed and error will be handled by means of rollback and sending failure to the upper layers. + + b.For the High Criticality errors, emergency errors will be logged further recommending a reboot. + + c.For the Medium criticality errors logging, retry mechanisms will be implemented further logging emergency logs to the system and recommend a reboot to the administrator. + +During the initialization, failure will be logged as emergency and error will be returned to the calling program \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/gitGuideline.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/gitGuideline.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..334c78e7903d5f660285aab6efaaff31ca0cf304 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/gitGuideline.rst.txt @@ -0,0 +1,47 @@ +******************************** +Branch Management Guidelines +******************************** + +.. figure:: figures/gitGuideline.jpg + +The community can have two main branches with an infinite lifetime: + +1. **Master branch**: This is a highly stable branch that is always production-ready and contains the last release version of source code in production. +2. **Development branch**: Derived from the master branch, the development branch serves as a branch for integrating different features planned for an upcoming release. This branch may or may not be as stable as the master branch. It is where developers collaborate and merge feature branches. All of the changes should be merged back into the master somehow and then tagged with a release number. + +Apart from those two primary branches, there are other branches in the workflow: + +- **Feature Branch**: Forked from the development branch for feature development i.e. enhancement or documentation. Merged back to the development branch after feature development or enhancement implementation. + +- **Bug Branch**: Ramify from the development branch. Merged back to the development branch after bug fixing. + +- **Hotfix branch**: Hotfix branches are created from the master branch. It is the current production release running live and causing troubles due to a severe bug. But changes in development are yet unstable. We may then branch off a hotfix branch and start fixing the problem. It should be the rarest occasion, in case only critical bugs. + +**Note**: Only NLE and NECTI members have the privilege to create and merge the Hotfix branch. + +.. 
list-table::  **Branch naming convention** +   :widths: 20 40 40 +   :header-rows: 1 + +   * - Branch +     - Branches naming guideline +     - Remarks +      +   * - Feature branches +     - Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *feature-feature_id* +     - *feature_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** + +   * - Bug Branches +     - Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *bug-bug_id* +     - *bug_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** + +   * - Hotfix Branches +     - Must branch from: *master branch*. Must merge back into: *master branch*. Branch naming convention: *hotfix-bug number*. +     - *Bug number* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** + +Permissions to the branches: +******************************* + +- **Master** - We tend to very strict that only NLE members and privileged members of NECTI can merge on the Master branch and accept the pull requests. Pull requests to master can be raised by only NECTI OR NLE members. + +- **Development** - Any community member can raise the pull request to the development branch but it should be reviewed by NLE or NECTI members. Development branches commits will be moved to the master branch only when all the test cases written under NGSI-LD test suites, will run successfully. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/hardwareRequirement.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/hardwareRequirement.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..7a297160b3e817b58033c225790a881d2a5af7cf --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/hardwareRequirement.rst.txt @@ -0,0 +1,57 @@ +***************************************** +Java 8 System Requirements +***************************************** + +**Windows** + +- Windows 10 (8u51 and above) +- Windows 8.x (Desktop) +- Windows 7 SP1 +- Windows Vista SP2 +- Windows Server 2008 R2 SP1 (64-bit) +- Windows Server 2012 and 2012 R2 (64-bit) +- RAM: 128 MB +- Disk space: 124 MB for JRE; 2 MB for Java Update +- Processor: Minimum Pentium 2 266 MHz processor +- Browsers: Internet Explorer 9 and above, Firefox + +**Mac OS X** + +- Intel-based Mac running Mac OS X 10.8.3+, 10.9+ +- Administrator privileges for installation +- 64-bit browser +- A 64-bit browser (Safari, for example) is required to run Oracle Java on Mac. + +**Linux** + +- Oracle Linux 5.5+1 +- Oracle Linux 6.x (32-bit), 6.x (64-bit)2 +- Oracle Linux 7.x (64-bit)2 (8u20 and above) +- Red Hat Enterprise Linux 5.5+1, 6.x (32-bit), 6.x (64-bit)2 +- Red Hat Enterprise Linux 7.x (64-bit)2 (8u20 and above) +- Suse Linux Enterprise Server 10 SP2+, 11.x +- Suse Linux Enterprise Server 12.x (64-bit)2 (8u31 and above) +- Ubuntu Linux 12.04 LTS, 13.x +- Ubuntu Linux 14.x (8u25 and above) +- Ubuntu Linux 15.04 (8u45 and above) +- Ubuntu Linux 15.10 (8u65 and above) +- Browsers: Firefox + +***************************************** +ZooKeeper Requirements +***************************************** + +ZooKeeper runs in Java, release 1.6 or greater (JDK 6 or greater). +It runs as an ensemble of ZooKeeper servers. +Three ZooKeeper servers are the minimum recommended size for an ensemble, and we also recommend that they run on separate machines. 
+At Yahoo!, ZooKeeper is usually deployed on dedicated RHEL boxes, with dual-core processors, 2GB of RAM, and 80GB IDE hard drives. + +***************************************** +Recommendations for Kafka +***************************************** + +**Kafka brokers** use both the JVM heap and the OS page cache. The JVM heap is used for the replication of partitions between brokers and for log compaction. Replication requires 1MB (default replica.max.fetch.size) for each partition on the broker. In Apache Kafka 0.10.1 (Confluent Platform 3.1), we added a new configuration (replica.fetch.response.max.bytes) that limits the total RAM used for replication to 10MB, to avoid memory and garbage collection issues when the number of partitions on a broker is high. For log compaction, calculating the required memory is more complicated and we recommend referring to the Kafka documentation if you are using this feature. For small to medium-sized deployments, 4GB heap size is usually sufficient. In addition, it is highly recommended that consumers always read from memory, i.e. from data that was written to Kafka and is still stored in the OS page cache. The amount of memory this requires depends on the rate at which this data is written and how far behind you expect consumers to get. If you write 20GB per hour per broker and you allow brokers to fall 3 hours behind in normal scenario, you will want to reserve 60GB to the OS page cache. In cases where consumers are forced to read from disk, performance will drop significantly + +**Kafka Connect** itself does not use much memory, but some connectors buffer data internally for efficiency. If you run multiple connectors that use buffering, you will want to increase the JVM heap size to 1GB or higher. + +**Consumers** use at least 2MB per consumer and up to 64MB in cases of large responses from brokers (typical for bursty traffic). Producers will have a buffer of 64MB each. Start by allocating 1GB RAM and add 64MB for each producer and 16MB for each consumer planned. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/index.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/index.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..cdd5d8c5b955faebb03f2fd4f9bba6b9def62ffd --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/index.rst.txt @@ -0,0 +1,35 @@ +.. toctree:: + :maxdepth: 1 + :caption: Introduction + :numbered: + + introduction.rst + +.. toctree:: + :maxdepth: 1 + :caption: Beginner Guide + :numbered: + + onepageTutorial.rst + +.. toctree:: + :maxdepth: 1 + :caption: User Guide + :numbered: + + hardwareRequirement.rst + errorHandling.rst + security.rst + +.. 
toctree::
+ :maxdepth: 1
+ :caption: Advanced User Guide
+ :numbered:
+
+ systemOverview.rst
+ callFlow.rst
+ contributionGuideline.rst
+ API_walkthrough.rst
+ docker.rst
+ troubleshooting.rst
+
\ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/introduction.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/introduction.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..1b6b372aba25e551abf88a145db90e5685c5d84f --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/introduction.rst.txt @@ -0,0 +1,30 @@
+*******************************
+Introduction
+*******************************
+In today's era people can't imagine their lives without the internet, and the same holds for our devices: most of them are now integrated into the IoT, which gives us a plethora of advantages but also leaves us with a few complexities.
+One of these is making these devices interact with each other, since each device makes use of a different schema; to mitigate this issue we have a one-stop solution.
+
+Scorpio Broker is a Java based pub-sub service designed and developed for the **FIWARE** platform, built on top of the **Spring Boot architecture** using **NGSI-LD** concepts.
+Scorpio Broker allows you to collect, process, notify, and store IoT data with dynamic context through the use of linked data concepts.
+It makes use of a **microservice-based architecture**, which has its own advantages over existing IoT brokers such as scalability, cross-technology integration, etc.
+
+Scorpio Broker, based on NGSI-LD, offers the unique feature of a linked data context that provides a self-contained (or referenced) **dynamic schema definition** (i.e. the context) for the data contained in each message/entity.
+This allows the Scorpio Broker core processing to remain unified even when it receives dynamic context-driven data as input from different types of data sources coupled with (or designed for) different schemas.
+
+Key advantages of Scorpio Broker over other brokers:
+
+- Uses a micro-service architecture, which enhances performance drastically.
+
+- The Scorpio Broker architecture is designed & implemented to be scalable, highly available, and load balanced.
+
+- Use of LD (linked data) gives the leverage of a dynamic context.
+
+- Usage of Kafka provides a robust pub-sub service with the ability to scale with no downtime.
+
+- It provides fail-over resiliency.
+
+- It provides load balancing to distribute the load on distributed infrastructure.
+
+- It is modular enough to offer low coupling and high cohesion by design.
+
+- It offers integration with different storage backends without changing the application logic time and again. diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/onepageTutorial.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/onepageTutorial.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..c70ad3ec7f9cb0d9c673179faa6b0e1cd4cdac06 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/onepageTutorial.rst.txt @@ -0,0 +1,237 @@
+*****************************************
+Installation Guide
+*****************************************
+
+In order to set up the environment of the Scorpio Broker, the following dependencies need to be configured (a quick verification sketch follows the list):
+
+1. Eclipse.
+2. Server JRE.
+3. ZooKeeper.
+4. Apache Kafka.
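Once these are installed, a quick sanity check from a shell can confirm the pieces are reachable before continuing. The commands below are only a sketch: they assume a local installation with default ports, Linux/macOS-style paths (on Windows the equivalent scripts live under ``bin\windows``), and the Kafka/ZooKeeper versions referenced in this guide.

.. code-block:: bash

   # Sketch only: verify prerequisites on a local installation with default ports
   java -version                                            # JRE/JDK is on the PATH
   echo stat | nc localhost 2181                            # ZooKeeper answers on its default port
   bin/kafka-topics.sh --list --zookeeper localhost:2181    # Kafka CLI can reach ZooKeeper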
+ + +***************************************** +Windows +***************************************** + + +Eclipse installation +############################ + +- **Download the Eclipse Installer.**: + + Download Eclipse Installer from http://www.eclipse.org/downloads.Eclipse is hosted on many mirrors around the world. Please select the one closest to you and start to download the Installer. + +- **Start the Eclipse Installer executable**: + + For Windows users, after the Eclipse Installer, the executable has finished downloading it should be available in your download directory. Start the Eclipse Installer executable. You may get a security warning to run this file. If the Eclipse Foundation is the Publisher, you are good to select Run. + + For Mac and Linux users, you will still need to unzip the download to create the Installer. Start the Installer once it is available. + +- **Select the package to install**: + + The new Eclipse Installer shows the packages available to Eclipse users. You can search for the package you want to install or scroll through the list. Select and click on the package you want to install. + +- **Select your installation folder** + + Specify the folder where you want Eclipse to be installed. The default folder will be in your User directory. Select the ‘Install’ button to begin the installation. + +- **Launch Eclipse** + + Once the installation is complete you can now launch Eclipse. The Eclipse Installer has done its work. Happy coding. + + +JRE Setup +############## + +- Start the JRE installation and hit the “Change destination folder†checkbox, then click 'Install.' + +.. figure:: figures/jre-1.png + +- Change the installation directory to any path without spaces in the folder name. E.g. C:\Java\jre1.8.0_xx\. (By default it will be C:\Program Files\Java\jre1.8.0_xx), then click 'Next.' + + +After you've installed Java in Windows, you must set the  JAVA_HOME  environment variable to point to the Java installation directory. + +**Set the JAVA_HOME Variable** + +To set the JAVA_HOME variable: + +1. Find out where Java is installed. If you didn't change the path during installation, it will be something like this: + + *C:\Program Files\Java\jdk1.8.0_65* + +2. - In Windows 7 right-click **My Computer** and select **Properties** > **Advanced**. + + OR + + - In Windows 8 go to **Control Panel** > **System** > **Advanced System Settings**. + +3. Click the Environment Variables button. + +4. Under System Variables, click New. + +5. In the User Variable Name field, enter: **JAVA_HOME** + +6. In the User Variable Value field, enter your JDK path. + +.. figure:: figures/jre-2.png + + (Java path and version may change according to the version of Kafka you are using) + +7. Now click OK. + +8. Search for a Path variable in the “System Variable†section in the “Environment Variables†dialogue box you just opened. + +9. Edit the path and type *;%JAVA_HOME%\bin* at the end of the text already written there, just like the image below: + +.. figure:: figures/jre-3.png + + +- To confirm the Java installation, just open cmd and type “java –version.â€Â You should be able to see the version of Java you just installed. + +.. figure:: figures/jre-4.png + +If your command prompt somewhat looks like the image above, you are good to go. Otherwise, you need to recheck whether your setup version matches the correct OS architecture (x86, x64), or if the environment variables path is correct. + + +ZooKeeper Installation +############################ + + +1. Go to your ZooKeeper config directory. 
For me its C:\zookeeper-3.4.7\conf +2. Rename file “zoo_sample.cfg†to “zoo.cfg†+3. Open zoo.cfg in any text editor, like Notepad; I prefer Notepad++. +4. Find and edit dataDir=/tmp/zookeeper to :\zookeeper-3.4.7\data   +5. Add an entry in the System Environment Variables as we did for Java. + + a. Add ZOOKEEPER_HOME = C:\zookeeper-3.4.7 to the System Variables. + b. Edit the System Variable named “Path†and add ;%ZOOKEEPER_HOME%\bin;  + +6. You can change the default Zookeeper port in zoo.cfg file (Default port 2181). +7. Run ZooKeeper by opening a new cmd and type zkserver. +8. You will see the command prompt with some details, like the image below: + +.. figure:: figures/zookee.png + + +Setting Up Kafka +############################ + +1. Go to your Kafka config directory. For example:- **C:\kafka_2.11-0.9.0.0\config** +2. Edit the file “server.properties.†+3. Find and edit the line log.dirs=/tmp/kafka-logs†to “log.dir= C:\kafka_2.11-0.9.0.0\kafka-logs. +4. If your ZooKeeper is running on some other machine or cluster you can edit “zookeeper.connect:2181â€Â to your custom IP and port. For this demo, we are using the same machine so there's no need to change. Also the Kafka port and broker.id are configurable in this file. Leave other settings as is. +5. Your Kafka will run on default port 9092 and connect to ZooKeeper’s default port, 2181. + +**Note**: For running Kafka, zookeepers should run first. At the time of closing Kafka, zookeeper should be closed first than Kafka. + + +Running a Kafka Server +############################ + +Important: Please ensure that your ZooKeeper instance is up and running before starting a Kafka server. + +1. Go to your Kafka installation directory:** C:\kafka_2.11-0.9.0.0\** +2. Open a command prompt here by pressing Shift + right-click and choose the “Open command window here†option). +3. Now type **.\bin\windows\kafka-server-start.bat .\config\server.properties** and press Enter. + + **.\bin\windows\kafka-server-start.bat .\config\server.properties** + + +Setting up PostgreSQL +############################ + +Step 1) Go to https://www.postgresql.org/download and select O.S., it's Windows for me. + + +Step 2) You are given two options:- + + 1. Interactive Installer by EnterpriseDB + 2. Graphical Installer by BigSQL + +BigSQL currently installs pgAdmin version 3 which is deprecated. It's best to choose EnterpriseDB which installs the latest version 4 + + +Step 3) + + 1. You will be prompted to the desired Postgre version and operating system. Select the Postgres 10, as Scorpio has been tested and developed with this version. + + 2. Click the Download Button, Download will begin + +Step 4) Open the downloaded .exe and Click next on the install welcome screen. + + +Step 5) + + 1. Change the Installation directory if required, else leave it to default + + 2.Click Next + + +Step 6) + + 1. You can choose the components you want to install in your system. You may uncheck Stack Builder + + 2. Click on Next + + +Step 7) + + 1. You can change the data location + + 2.Click Next + + +Step 8) + + 1. Enter the superuser password. Make a note of it + + 2.Click Next + + +Step 9) + + 1. Leave the port number as the default + + 2.Click Next + + +Step 10) + + 1. Check the pre-installation summary. + + 2.Click Next + +Step 11) Click the next button + +Step 12) Once install is complete you will see the Stack Builder prompt + + 1. Uncheck that option. 
We will use Stack Builder in more advance tutorials + + 2.Click Finish + +Step 13) To launch Postgre go to Start Menu and search pgAdmin 4 + +Step 14) You will see pgAdmin homepage + +Step 15) Click on Servers > Postgre SQL 10 in the left tree + +.. figure:: figures/dbconfig-1.png + +Step 16) + + 1.Enter superuser password set during installation + + 2. Click OK + +Step 17) You will see the Dashboard + +.. figure:: figures/dbconfig-2.png + +That's it to Postgre SQL installation. + +***************************************** +Linux +***************************************** + diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/security.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/security.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..3a0e08987860111c8b386bfd9a653c491350b4fc --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/security.rst.txt @@ -0,0 +1,37 @@ +***************************************** +Security Architecture +***************************************** +Scorpio Broker system will also be responsible for any of the Identity & authentication management security. This will include authentication & authorization of requests, users, role base protected resources to access in Scorpio Broker security realm. + +A new Authentication & Authorization service compliant to OAuth2.0 specs has been introduced that will provide the application layer security to the entire Scorpio Broker components & services. + +.. figure:: figures/security.png + +***************************************** +Security - Functional Request Flow +***************************************** +1. Browser/end user sends a resource request which is protected to the Scorpio Broker system using the API gateway REST interface. + +2. API Gateway checks if the security feature is enabled. + + a. If yes then, it checks if the request is already authenticated and already has some existing session. + + - If it does not find any session, then it forwards the request to Authentication & Authorization services. Or + + - If it finds any existing session than it reuses the same session for the authentication purpose and routes the request to the back-end resource service. + + b.If no security is enabled then, it bypasses security check and routes the request to the back-end resource service which is responsible to render the resource against the given request. + +3. Now when the request comes at Authentication & Authorization (Auth in short) service, it responds to the original requester i.e. user/browser with a login form to present their identity based on credentials it has been issued to access the resource. +  +4. So now the user submits the login form with its credential to Auth service. Auth services validate the user credentials based on its Account details and now responded with successful login auth code & also the redirect U to which the user can redirect to fetch its resource request. + +5. User/Browser now redirects at the redirect URL which is in our case is again the API gateway URL with the auth_code that it has received from the Auth service. + +6. Now API gateway again checks the session where it finds the existing session context but now this time since it receives the auth_code in the request so it uses that auth_code and requests the token from Auth service acting as a client on user’s behalf. Auth service based on auth code recognized that it is already logged-in validated user and reverts back with the access token to the API gateway. 
+
+7. The API gateway, upon receiving the token (within the same security session context), now relays/routes the request to the back-end resource service for the originally requested resource/operation.
+
+8. The back-end resource service is also enabled with security features (if not, an error is thrown for the incoming secure request). It receives the request, reads the security context out of it, and validates it (based on the extracted info) with the Auth service to determine whether this is a valid token/request with the given privileges. The Auth service responds, and the back-end service then decides whether the local security configuration and the Auth-service-based access permissions match.
+
+9. If the access permissions/privileges match for the incoming request, then it responds with the requested resources to the user/browser. If the request does not match the security criteria, it returns an error message and the reason why it is being denied.
\ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/systemOverview.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/systemOverview.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..5a166c21f0159aed4deacefb26cf84c8c999d8ec --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/systemOverview.rst.txt @@ -0,0 +1,37 @@
+*****************************************
+Architecture
+*****************************************
+The deployment architecture leverages the Spring Cloud framework, which addresses many micro-service concerns (e.g. scaling, monitoring, fault tolerance, high availability, security, decoupling, etc.), and a Kafka based distributed and scalable message queue infrastructure to provide high performance on message processing for the huge number of context requests that is usual in the IoT domain.
+
+It covers the high-level operations (HTTP based REST with methods POST/GET/DELETE/PATCH) and the request flow from the external world to the Scorpio Broker system. The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide. In reality, each of the Scorpio Broker services has been implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment. The API gateway routes all incoming requests to the specific micro-services with the help of the registration & discovery service. Once the request reaches a micro-service, based on the operation requirement it uses (pub/sub) Kafka topics (message queues) for real-time storage and for providing intercommunication among the different micro-services (based on requirement) over message queues.
+
+.. figure:: figures/architecture.png
+
+- **Application**: End-user/domain applications leverage the Scorpio Broker to provide the required information about the IoT infrastructure. These applications can query, subscribe to, and update context information to/from the Scorpio Broker as per their requirements.
+- **Consumers**: These are the IoT entities or applications that consume the data of the Scorpio Broker.
+- **Producers**: These are the IoT entities, context sources, or applications that produce context data for the Scorpio Broker.
+- **Service API Gateway**: This is the proxy gateway for the external world to access the internal services of the Scorpio Broker system, exposed via REST-based HTTP interfaces.
All internal Scorpio Broker related services can be accessed through this service gateway using its single IP & port (which are usually static) and by extending the service name in the URL. Thus, the user does not need to take care of (or learn, or use) the IP and port of every service, which often change dynamically. This makes life easier, especially when multiple services (or micro-services) are running under one system; the proxy gateway (i.e. the service API gateway) solves this for all the back-end services. A request sketch is shown after this component list.
+- **Rest Interface**: These are the HTTP-based interfaces that external entities/applications consume in order to execute certain operations on the Scorpio Broker. The external interface is visible through the Service API gateway, and the internal interface mapping to each requested service is discovered through the service registration & discovery module.
+- **Service Discovery & Registration**: This component allows any service (web service/micro-service) to register with it, so that any client using the discovery functionality of this component can determine the location of a service instance to which it wants to send requests. In short, service registry & discovery implements a database of services, their instances, and their locations. Service instances get registered with the service registry on startup and deregistered on shutdown. A client of a service queries the service registry to discover the available instances of that service. A service registry might also invoke a service instance's health check API to verify that it is able to handle requests.
+- **Entity Manager**: This component handles all entity-related CRUD operations with the help of other components of the Scorpio Broker.
+- **LD Context Resolver**: This component is responsible for expanding the NGSI-LD document based on the JSON-LD @context for further processing by the other components of the Scorpio Broker.
+- **Subscription & Notification Manager**: This component is responsible for handling CRUD operations related to entity and/or csource subscriptions & notifications.
+- **Query Manager**: This component handles simple or complex queries (e.g. geo-queries) to the Scorpio Broker.
+- **Storage Manager**: This component is responsible for fetching data from the message broker and transforming it into the relevant schema format in order to persist it in DB tables. Additionally, this manager also provides interfaces for complex queries to the DB, e.g. geo-queries or cross-domain entity context relationship queries.
+- **Context Registry Manager**: This component is responsible for providing interfaces for CRUD operations of csource registration/query/subscription.
+- **Health Check & Monitoring**: This component is responsible for monitoring the health of the running services & infrastructure.
+- **Message Bus Handler**: Every module of the Scorpio Broker may need to communicate with the bus for the inter-module exchange of messages. This interface is provided by the message bus handler.
+- **Storage Connectors**: The Scorpio Broker needs to store certain information in different DB formats. Storage connectors (using any type of message broker methodology) provide the way to connect to those storage systems, which may be present locally or remotely. For example, the entity information could be stored in/streamed to different types of storage systems, e.g. MySQL, PostgreSQL, big data stores, etc. These connectors could also be implemented for storage resiliency purposes.
+- **Context Registry Connector**: The Scorpio Broker needs to communicate with the context registry in order to know about the registered context sources (brokers/providers) and the type of data model they support. The context registry connector allows the message broker mechanism to connect to the context registry, which may be running locally or remotely in federated mode.
+- **Storage**: This is the actual storage (e.g. Postgres/PostGIS) where the data is persisted.
+- **Context Registry**: This is the component responsible for saving the registrations of the context sources/producers.
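To make the role of the gateway and the REST interface concrete, the sketch below (Java, JDK 11 ``java.net.http`` client) creates a small NGSI-LD entity and reads it back through the single gateway endpoint, which then routes the calls internally (entity creation to the Entity Manager, the read to the Query Manager). The gateway address ``http://localhost:9090`` and the entity id/type/attribute are illustrative assumptions only.

.. code-block:: java

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class GatewayRequestSketch {

        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            String gateway = "http://localhost:9090";   // assumed gateway address

            // A minimal NGSI-LD entity; id, type and attribute are illustrative values only.
            String entity = "{"
                    + "\"id\": \"urn:ngsi-ld:TemperatureSensor:001\","
                    + "\"type\": \"TemperatureSensor\","
                    + "\"temperature\": {\"type\": \"Property\", \"value\": 21.5},"
                    + "\"@context\": [\"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\"]"
                    + "}";

            // Create the entity through the gateway; the request is routed to the Entity Manager.
            HttpRequest create = HttpRequest.newBuilder()
                    .uri(URI.create(gateway + "/ngsi-ld/v1/entities"))
                    .header("Content-Type", "application/ld+json")
                    .POST(HttpRequest.BodyPublishers.ofString(entity))
                    .build();
            System.out.println("create -> " + client.send(create, HttpResponse.BodyHandlers.ofString()).statusCode());

            // Read it back; the GET is routed to the Query Manager.
            HttpRequest read = HttpRequest.newBuilder()
                    .uri(URI.create(gateway + "/ngsi-ld/v1/entities/urn:ngsi-ld:TemperatureSensor:001"))
                    .header("Accept", "application/ld+json")
                    .GET()
                    .build();
            System.out.println(client.send(read, HttpResponse.BodyHandlers.ofString()).body());
        }
    }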
+
+*****************************************
+Deployment Architecture
+*****************************************
+
+This section covers the deployment architecture of the Scorpio Broker, which uses a diverse technology stack.
+
+.. figure:: figures/deploymentarchitecture.png
+
+The deployment architecture leverages the Spring Cloud framework, which addresses many micro-service concerns (e.g. scaling, monitoring, fault tolerance, high availability, security, decoupling, etc.), and a Kafka-based distributed and scalable message queue infrastructure to provide high-performance message processing for the huge number of context requests that is usual in the IoT domain. The deployment architecture covers the high-level request flow of operations (HTTP-based REST with methods POST/GET/DELETE/PATCH) from the external world to the Scorpio Broker system. The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide. In reality, each of the Scorpio Broker services is implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment. The API gateway routes all incoming requests to the specific micro-services with the help of the registration & discovery service. Once a request reaches a micro-service, that service uses (pub/sub) Kafka topics (message queues), depending on the operation, for real-time storage and for intercommunication among the different micro-services. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_sources/troubleshooting.rst.txt b/scorpio-broker/docs/en/source/_build/html/_sources/troubleshooting.rst.txt new file mode 100644 index 0000000000000000000000000000000000000000..7d2e8fc2da7f544b14a28fc1e856483fe58fdae1 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_sources/troubleshooting.rst.txt @@ -0,0 +1,34 @@
+*****************
+Troubleshooting
+*****************
+
+Missing JAXB dependencies
+=========================
+
+When starting the eureka-server you may encounter the **java.lang.TypeNotPresentException: Type javax.xml.bind.JAXBContext not present** exception. It is very likely that you are running Java 11 on your machine. Starting from Java 9, the package `javax.xml.bind` was marked as deprecated, and it was finally removed completely in Java 11.
+
+To fix this issue and get the eureka-server running, manually add the JAXB Maven dependencies below to `ScorpioBroker/SpringCloudModules/eureka/pom.xml` before starting:
+
+.. code-block:: xml
+
+    ...
+    
+            ... 
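    <!-- Note: the XML element tags of this snippet were stripped in this generated copy of
         the docs; only the indentation and the Maven coordinates (visible in the fragment
         that follows) survived. Reconstructed from those coordinates, the intended
         dependencies block would look like this: -->
    <dependencies>
            <dependency>
                    <groupId>com.sun.xml.bind</groupId>
                    <artifactId>jaxb-core</artifactId>
                    <version>2.3.0.1</version>
            </dependency>
            <dependency>
                    <groupId>javax.xml.bind</groupId>
                    <artifactId>jaxb-api</artifactId>
                    <version>2.3.1</version>
            </dependency>
            <dependency>
                    <groupId>com.sun.xml.bind</groupId>
                    <artifactId>jaxb-impl</artifactId>
                    <version>2.3.1</version>
            </dependency>
    </dependencies>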
+            +                    com.sun.xml.bind +                    jaxb-core +                    2.3.0.1 +            +            +                    javax.xml.bind +                    jaxb-api +                    2.3.1 +            +            +                    com.sun.xml.bind +                    jaxb-impl +                    2.3.1 +            +            ... +    +    ... \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_static/ajax-loader.gif b/scorpio-broker/docs/en/source/_build/html/_static/ajax-loader.gif new file mode 100644 index 0000000000000000000000000000000000000000..61faf8cab23993bd3e1560bff0668bd628642330 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/ajax-loader.gif differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/alabaster.css b/scorpio-broker/docs/en/source/_build/html/_static/alabaster.css new file mode 100644 index 0000000000000000000000000000000000000000..be65b13746c7033a612b3de3c75c6750f5479134 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_static/alabaster.css @@ -0,0 +1,693 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: 'goudy old style', 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro', serif; + font-size: 17px; + background-color: #fff; + color: #000; + margin: 0; + padding: 0; +} + + +div.document { + width: 940px; + margin: 30px auto 0 auto; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 220px; +} + +div.sphinxsidebar { + width: 220px; + font-size: 14px; + line-height: 1.5; +} + +hr { + border: 1px solid #B1B4B6; +} + +div.body { + background-color: #fff; + color: #3E4349; + padding: 0 30px 0 30px; +} + +div.body > .section { + text-align: left; +} + +div.footer { + width: 940px; + margin: 20px auto 30px auto; + font-size: 14px; + color: #888; + text-align: right; +} + +div.footer a { + color: #888; +} + +p.caption { + font-family: inherit; + font-size: inherit; +} + + +div.relations { + display: none; +} + + +div.sphinxsidebar a { + color: #444; + text-decoration: none; + border-bottom: 1px dotted #999; +} + +div.sphinxsidebar a:hover { + border-bottom: 1px solid #999; +} + +div.sphinxsidebarwrapper { + padding: 18px 10px; +} + +div.sphinxsidebarwrapper p.logo { + padding: 0; + margin: -10px 0 0 0px; + text-align: center; +} + +div.sphinxsidebarwrapper h1.logo { + margin-top: -10px; + text-align: center; + margin-bottom: 5px; + text-align: left; +} + +div.sphinxsidebarwrapper h1.logo-name { + margin-top: 0px; +} + +div.sphinxsidebarwrapper p.blurb { + margin-top: 0; + font-style: normal; +} + +div.sphinxsidebar h3, +div.sphinxsidebar h4 { + font-family: 'Garamond', 'Georgia', serif; + color: #444; + font-size: 24px; + font-weight: normal; + margin: 0 0 5px 0; + padding: 0; +} + +div.sphinxsidebar h4 { + font-size: 20px; +} + +div.sphinxsidebar h3 a { + color: #444; +} + +div.sphinxsidebar p.logo a, +div.sphinxsidebar h3 a, +div.sphinxsidebar p.logo a:hover, +div.sphinxsidebar h3 a:hover { + border: none; +} + +div.sphinxsidebar p { + color: #555; + margin: 10px 0; +} + +div.sphinxsidebar ul { + margin: 10px 0; + padding: 0; + color: #000; +} + +div.sphinxsidebar ul li.toctree-l1 > a { + font-size: 120%; +} + +div.sphinxsidebar ul li.toctree-l2 > a { + font-size: 110%; +} + +div.sphinxsidebar 
input { + border: 1px solid #CCC; + font-family: 'goudy old style', 'minion pro', 'bell mt', Georgia, 'Hiragino Mincho Pro', serif; + font-size: 1em; +} + +div.sphinxsidebar hr { + border: none; + height: 1px; + color: #AAA; + background: #AAA; + + text-align: left; + margin-left: 0; + width: 50%; +} + +/* -- body styles ----------------------------------------------------------- */ + +a { + color: #004B6B; + text-decoration: underline; +} + +a:hover { + color: #6D4100; + text-decoration: underline; +} + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: 'Garamond', 'Georgia', serif; + font-weight: normal; + margin: 30px 0px 10px 0px; + padding: 0; +} + +div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } +div.body h2 { font-size: 180%; } +div.body h3 { font-size: 150%; } +div.body h4 { font-size: 130%; } +div.body h5 { font-size: 100%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #DDD; + padding: 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + color: #444; + background: #EAEAEA; +} + +div.body p, div.body dd, div.body li { + line-height: 1.4em; +} + +div.admonition { + margin: 20px 0px; + padding: 10px 30px; + background-color: #EEE; + border: 1px solid #CCC; +} + +div.admonition tt.xref, div.admonition code.xref, div.admonition a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fafafa; +} + +div.admonition p.admonition-title { + font-family: 'Garamond', 'Georgia', serif; + font-weight: normal; + font-size: 24px; + margin: 0 0 10px 0; + padding: 0; + line-height: 1; +} + +div.admonition p.last { + margin-bottom: 0; +} + +div.highlight { + background-color: #fff; +} + +dt:target, .highlight { + background: #FAF3E8; +} + +div.warning { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.danger { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.error { + background-color: #FCC; + border: 1px solid #FAA; + -moz-box-shadow: 2px 2px 4px #D52C2C; + -webkit-box-shadow: 2px 2px 4px #D52C2C; + box-shadow: 2px 2px 4px #D52C2C; +} + +div.caution { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.attention { + background-color: #FCC; + border: 1px solid #FAA; +} + +div.important { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.note { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.tip { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.hint { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.seealso { + background-color: #EEE; + border: 1px solid #CCC; +} + +div.topic { + background-color: #EEE; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre, tt, code { + font-family: 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; + font-size: 0.9em; +} + +.hll { + background-color: #FFC; + margin: 0 -12px; + padding: 0 12px; + display: block; +} + +img.screenshot { +} + +tt.descname, tt.descclassname, code.descname, code.descclassname { + font-size: 0.95em; +} + +tt.descname, code.descname { + padding-right: 0.08em; +} + +img.screenshot { + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils { + border: 1px solid #888; + -moz-box-shadow: 2px 2px 4px #EEE; + -webkit-box-shadow: 2px 2px 4px #EEE; + box-shadow: 2px 2px 4px #EEE; +} + +table.docutils 
td, table.docutils th { + border: 1px solid #888; + padding: 0.25em 0.7em; +} + +table.field-list, table.footnote { + border: none; + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} + +table.footnote { + margin: 15px 0; + width: 100%; + border: 1px solid #EEE; + background: #FDFDFD; + font-size: 0.9em; +} + +table.footnote + table.footnote { + margin-top: -15px; + border-top: none; +} + +table.field-list th { + padding: 0 0.8em 0 0; +} + +table.field-list td { + padding: 0; +} + +table.field-list p { + margin-bottom: 0.8em; +} + +/* Cloned from + * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68 + */ +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +table.footnote td.label { + width: .1px; + padding: 0.3em 0 0.3em 0.5em; +} + +table.footnote td { + padding: 0.3em 0.5em; +} + +dl { + margin: 0; + padding: 0; +} + +dl dd { + margin-left: 30px; +} + +blockquote { + margin: 0 0 0 30px; + padding: 0; +} + +ul, ol { + /* Matches the 30px from the narrow-screen "li > ul" selector below */ + margin: 10px 0 10px 30px; + padding: 0; +} + +pre { + background: #EEE; + padding: 7px 30px; + margin: 15px 0px; + line-height: 1.3em; +} + +div.viewcode-block:target { + background: #ffd; +} + +dl pre, blockquote pre, li pre { + margin-left: 0; + padding-left: 30px; +} + +tt, code { + background-color: #ecf0f3; + color: #222; + /* padding: 1px 2px; */ +} + +tt.xref, code.xref, a tt { + background-color: #FBFBFB; + border-bottom: 1px solid #fff; +} + +a.reference { + text-decoration: none; + border-bottom: 1px dotted #004B6B; +} + +/* Don't put an underline on images */ +a.image-reference, a.image-reference:hover { + border-bottom: none; +} + +a.reference:hover { + border-bottom: 1px solid #6D4100; +} + +a.footnote-reference { + text-decoration: none; + font-size: 0.7em; + vertical-align: top; + border-bottom: 1px dotted #004B6B; +} + +a.footnote-reference:hover { + border-bottom: 1px solid #6D4100; +} + +a:hover tt, a:hover code { + background: #EEE; +} + + +@media screen and (max-width: 870px) { + + div.sphinxsidebar { + display: none; + } + + div.document { + width: 100%; + + } + + div.documentwrapper { + margin-left: 0; + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + } + + div.bodywrapper { + margin-top: 0; + margin-right: 0; + margin-bottom: 0; + margin-left: 0; + } + + ul { + margin-left: 0; + } + + li > ul { + /* Matches the 30px from the "ul, ol" selector above */ + margin-left: 30px; + } + + .document { + width: auto; + } + + .footer { + width: auto; + } + + .bodywrapper { + margin: 0; + } + + .footer { + width: auto; + } + + .github { + display: none; + } + + + +} + + + +@media screen and (max-width: 875px) { + + body { + margin: 0; + padding: 20px 30px; + } + + div.documentwrapper { + float: none; + background: #fff; + } + + div.sphinxsidebar { + display: block; + float: none; + width: 102.5%; + margin: 50px -30px -20px -30px; + padding: 10px 20px; + background: #333; + color: #FFF; + } + + div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, + div.sphinxsidebar h3 a { + color: #fff; + } + + div.sphinxsidebar a { + color: #AAA; + } + + div.sphinxsidebar p.logo { + display: none; + } + + div.document { + width: 100%; + margin: 0; + } + + div.footer { + display: none; + } + + div.bodywrapper { + margin: 0; + } + + div.body { + min-height: 0; + padding: 0; + } + + .rtd_doc_footer { + display: none; + } + + .document { + width: auto; + } + + .footer { + 
width: auto; + } + + .footer { + width: auto; + } + + .github { + display: none; + } +} + + +/* misc. */ + +.revsys-inline { + display: none!important; +} + +/* Make nested-list/multi-paragraph items look better in Releases changelog + * pages. Without this, docutils' magical list fuckery causes inconsistent + * formatting between different release sub-lists. + */ +div#changelog > div.section > ul > li > p:only-child { + margin-bottom: 0; +} + +/* Hide fugly table cell borders in ..bibliography:: directive output */ +table.docutils.citation, table.docutils.citation td, table.docutils.citation th { + border: none; + /* Below needed in some edge cases; if not applied, bottom shadows appear */ + -moz-box-shadow: none; + -webkit-box-shadow: none; + box-shadow: none; +} \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_static/basic.css b/scorpio-broker/docs/en/source/_build/html/_static/basic.css new file mode 100644 index 0000000000000000000000000000000000000000..19ced1057aebf39d3dac6f6a5d864491231a3e33 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_static/basic.css @@ -0,0 +1,665 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; + word-wrap: break-word; + overflow-wrap : break-word; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + float: left; + width: 80%; + padding: 0.25em; + box-sizing: border-box; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + float: left; + width: 20%; + border-left: none; + padding: 0.25em; + box-sizing: border-box; +} + + +img { + border: 0; + max-width: 100%; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; + margin-left: auto; + margin-right: auto; +} + +table.contentstable p.biglink { + line-height: 150%; +} + 
+a.biglink { + font-size: 1.3em; +} + +span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable ul { + margin-top: 0; + margin-bottom: 0; + list-style-type: none; +} + +table.indextable > tbody > tr > td > ul { + padding-left: 0em; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- domain module index --------------------------------------------------- */ + +table.modindextable td { + padding: 2px; + border-collapse: collapse; +} + +/* -- general body styles --------------------------------------------------- */ + +div.body { + min-width: 450px; + max-width: 800px; +} + +div.body p, div.body dd, div.body li, div.body blockquote { + -moz-hyphens: auto; + -ms-hyphens: auto; + -webkit-hyphens: auto; + hyphens: auto; +} + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink, +caption:hover > a.headerlink, +p.caption:hover > a.headerlink, +div.code-block-caption:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar { + margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px 7px 0 7px; + background-color: #ffe; + width: 40%; + float: right; +} + +p.sidebar-title { + font-weight: bold; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + 
+table.align-center { + margin-left: auto; + margin-right: auto; +} + +table caption span.caption-number { + font-style: italic; +} + +table caption span.caption-text { +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +/* -- figures --------------------------------------------------------------- */ + +div.figure { + margin: 0.5em; + padding: 0.5em; +} + +div.figure p.caption { + padding: 0.3em; +} + +div.figure p.caption span.caption-number { + font-style: italic; +} + +div.figure p.caption span.caption-text { +} + +/* -- field list styles ----------------------------------------------------- */ + +table.field-list td, table.field-list th { + border: 0 !important; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.field-name { + -moz-hyphens: manual; + -ms-hyphens: manual; + -webkit-hyphens: manual; + hyphens: manual; +} + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +dl { + margin-bottom: 15px; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dt:target, span.highlighted { + background-color: #fbe54e; +} + +rect.highlighted { + fill: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.optional { + font-size: 1.3em; +} + +.sig-paren { + font-size: larger; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +span.pre { + -moz-hyphens: none; + -ms-hyphens: none; + -webkit-hyphens: none; + hyphens: none; +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +div.code-block-caption { + padding: 2px 5px; + font-size: small; +} + +div.code-block-caption code { + background-color: transparent; +} + +div.code-block-caption + div > div.highlight > pre { + margin-top: 0; +} + +div.code-block-caption span.caption-number { + padding: 0.1em 0.3em; + font-style: italic; +} + +div.code-block-caption span.caption-text { +} + +div.literal-block-wrapper { + padding: 1em 1em 0; +} + +div.literal-block-wrapper div.highlight { + margin: 0; 
+} + +code.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +code.descclassname { + background-color: transparent; +} + +code.xref, a code { + background-color: transparent; + font-weight: bold; +} + +h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +span.eqno a.headerlink { + position: relative; + left: 0px; + z-index: 1; +} + +div.math:hover a.headerlink { + visibility: visible; +} + +/* -- printout stylesheet --------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_static/comment-bright.png b/scorpio-broker/docs/en/source/_build/html/_static/comment-bright.png new file mode 100644 index 0000000000000000000000000000000000000000..15e27edb12ac25701ac0ac21b97b52bb4e45415e Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/comment-bright.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/comment-close.png b/scorpio-broker/docs/en/source/_build/html/_static/comment-close.png new file mode 100644 index 0000000000000000000000000000000000000000..4d91bcf57de866a901a89a2a68c0f36af1114841 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/comment-close.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/comment.png b/scorpio-broker/docs/en/source/_build/html/_static/comment.png new file mode 100644 index 0000000000000000000000000000000000000000..dfbc0cbd512bdeefcb1984c99d8e577efb77f006 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/comment.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/custom.css b/scorpio-broker/docs/en/source/_build/html/_static/custom.css new file mode 100644 index 0000000000000000000000000000000000000000..2a924f1d6a8bc930c5296bdb2d5c2d3e39b04a1c --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_static/custom.css @@ -0,0 +1 @@ +/* This file intentionally left blank. */ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/doctools.js b/scorpio-broker/docs/en/source/_build/html/_static/doctools.js new file mode 100644 index 0000000000000000000000000000000000000000..d8928926bf24bb081d91a12f7b92e1b2d8b142b6 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_static/doctools.js @@ -0,0 +1,313 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2018 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var bbox = span.getBBox(); + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + var parentOfText = node.parentNode.parentNode; + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. 
+ */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated === 'undefined') + return string; + return (typeof translated === 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated === 'undefined') + return (n == 1) ? singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + if (!body.length) { + body = $('body'); + } + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) === 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this === '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + }, + + initOnKeyListeners: function() { + $(document).keyup(function(event) { + var activeElementType = document.activeElement.tagName; + // don't navigate when in search box or textarea + if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') { + switch (event.keyCode) { + case 37: // left + var prevHref = $('link[rel="prev"]').prop('href'); + if (prevHref) { + window.location.href = prevHref; + return false; + } + case 39: // right + var nextHref = $('link[rel="next"]').prop('href'); + if (nextHref) { + window.location.href = nextHref; + return false; + } + } + } + }); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_static/documentation_options.js b/scorpio-broker/docs/en/source/_build/html/_static/documentation_options.js new file mode 100644 index 0000000000000000000000000000000000000000..d0b0ed276b86f5869c3a411b37f92d9f92bfe174 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_static/documentation_options.js @@ -0,0 +1,9 @@ +var DOCUMENTATION_OPTIONS = { + URL_ROOT: '', + VERSION: '', + LANGUAGE: 'None', + COLLAPSE_INDEX: false, + FILE_SUFFIX: '.html', + HAS_SOURCE: true, + SOURCELINK_SUFFIX: '.txt' +}; \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/_static/down-pressed.png b/scorpio-broker/docs/en/source/_build/html/_static/down-pressed.png new file mode 100644 index 0000000000000000000000000000000000000000..5756c8cad8854722893dc70b9eb4bb0400343a39 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/down-pressed.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/down.png b/scorpio-broker/docs/en/source/_build/html/_static/down.png new file mode 100644 index 0000000000000000000000000000000000000000..1b3bdad2ceffae91cee61b32f3295f9bbe646e48 Binary files 
/dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/down.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/file.png b/scorpio-broker/docs/en/source/_build/html/_static/file.png new file mode 100644 index 0000000000000000000000000000000000000000..a858a410e4faa62ce324d814e4b816fff83a6fb3 Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/_static/file.png differ diff --git a/scorpio-broker/docs/en/source/_build/html/_static/jquery-3.2.1.js b/scorpio-broker/docs/en/source/_build/html/_static/jquery-3.2.1.js new file mode 100644 index 0000000000000000000000000000000000000000..d2d8ca4790e52b0537f3cbb7dcd766099b789583 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/_static/jquery-3.2.1.js @@ -0,0 +1,10253 @@ +/*! + * jQuery JavaScript Library v3.2.1 + * https://jquery.com/ + * + * Includes Sizzle.js + * https://sizzlejs.com/ + * + * Copyright JS Foundation and other contributors + * Released under the MIT license + * https://jquery.org/license + * + * Date: 2017-03-20T18:59Z + */ +( function( global, factory ) { + + "use strict"; + + if ( typeof module === "object" && typeof module.exports === "object" ) { + + // For CommonJS and CommonJS-like environments where a proper `window` + // is present, execute the factory and get jQuery. + // For environments that do not have a `window` with a `document` + // (such as Node.js), expose a factory as module.exports. + // This accentuates the need for the creation of a real `window`. + // e.g. var jQuery = require("jquery")(window); + // See ticket #14549 for more info. + module.exports = global.document ? + factory( global, true ) : + function( w ) { + if ( !w.document ) { + throw new Error( "jQuery requires a window with a document" ); + } + return factory( w ); + }; + } else { + factory( global ); + } + +// Pass this if window is not defined yet +} )( typeof window !== "undefined" ? window : this, function( window, noGlobal ) { + +// Edge <= 12 - 13+, Firefox <=18 - 45+, IE 10 - 11, Safari 5.1 - 9+, iOS 6 - 9.1 +// throw exceptions when non-strict code (e.g., ASP.NET 4.5) accesses strict mode +// arguments.callee.caller (trac-13335). But as of jQuery 3.0 (2016), strict mode should be common +// enough that all such attempts are guarded in a try block. 
+"use strict"; + +var arr = []; + +var document = window.document; + +var getProto = Object.getPrototypeOf; + +var slice = arr.slice; + +var concat = arr.concat; + +var push = arr.push; + +var indexOf = arr.indexOf; + +var class2type = {}; + +var toString = class2type.toString; + +var hasOwn = class2type.hasOwnProperty; + +var fnToString = hasOwn.toString; + +var ObjectFunctionString = fnToString.call( Object ); + +var support = {}; + + + + function DOMEval( code, doc ) { + doc = doc || document; + + var script = doc.createElement( "script" ); + + script.text = code; + doc.head.appendChild( script ).parentNode.removeChild( script ); + } +/* global Symbol */ +// Defining this global in .eslintrc.json would create a danger of using the global +// unguarded in another place, it seems safer to define global only for this module + + + +var + version = "3.2.1", + + // Define a local copy of jQuery + jQuery = function( selector, context ) { + + // The jQuery object is actually just the init constructor 'enhanced' + // Need init if jQuery is called (just allow error to be thrown if not included) + return new jQuery.fn.init( selector, context ); + }, + + // Support: Android <=4.0 only + // Make sure we trim BOM and NBSP + rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, + + // Matches dashed string for camelizing + rmsPrefix = /^-ms-/, + rdashAlpha = /-([a-z])/g, + + // Used by jQuery.camelCase as callback to replace() + fcamelCase = function( all, letter ) { + return letter.toUpperCase(); + }; + +jQuery.fn = jQuery.prototype = { + + // The current version of jQuery being used + jquery: version, + + constructor: jQuery, + + // The default length of a jQuery object is 0 + length: 0, + + toArray: function() { + return slice.call( this ); + }, + + // Get the Nth element in the matched element set OR + // Get the whole matched element set as a clean array + get: function( num ) { + + // Return all the elements in a clean array + if ( num == null ) { + return slice.call( this ); + } + + // Return just the one element from the set + return num < 0 ? this[ num + this.length ] : this[ num ]; + }, + + // Take an array of elements and push it onto the stack + // (returning the new matched element set) + pushStack: function( elems ) { + + // Build a new jQuery matched element set + var ret = jQuery.merge( this.constructor(), elems ); + + // Add the old object onto the stack (as a reference) + ret.prevObject = this; + + // Return the newly-formed element set + return ret; + }, + + // Execute a callback for every element in the matched set. + each: function( callback ) { + return jQuery.each( this, callback ); + }, + + map: function( callback ) { + return this.pushStack( jQuery.map( this, function( elem, i ) { + return callback.call( elem, i, elem ); + } ) ); + }, + + slice: function() { + return this.pushStack( slice.apply( this, arguments ) ); + }, + + first: function() { + return this.eq( 0 ); + }, + + last: function() { + return this.eq( -1 ); + }, + + eq: function( i ) { + var len = this.length, + j = +i + ( i < 0 ? len : 0 ); + return this.pushStack( j >= 0 && j < len ? [ this[ j ] ] : [] ); + }, + + end: function() { + return this.prevObject || this.constructor(); + }, + + // For internal use only. + // Behaves like an Array's method, not like a jQuery method. 
+ push: push, + sort: arr.sort, + splice: arr.splice +}; + +jQuery.extend = jQuery.fn.extend = function() { + var options, name, src, copy, copyIsArray, clone, + target = arguments[ 0 ] || {}, + i = 1, + length = arguments.length, + deep = false; + + // Handle a deep copy situation + if ( typeof target === "boolean" ) { + deep = target; + + // Skip the boolean and the target + target = arguments[ i ] || {}; + i++; + } + + // Handle case when target is a string or something (possible in deep copy) + if ( typeof target !== "object" && !jQuery.isFunction( target ) ) { + target = {}; + } + + // Extend jQuery itself if only one argument is passed + if ( i === length ) { + target = this; + i--; + } + + for ( ; i < length; i++ ) { + + // Only deal with non-null/undefined values + if ( ( options = arguments[ i ] ) != null ) { + + // Extend the base object + for ( name in options ) { + src = target[ name ]; + copy = options[ name ]; + + // Prevent never-ending loop + if ( target === copy ) { + continue; + } + + // Recurse if we're merging plain objects or arrays + if ( deep && copy && ( jQuery.isPlainObject( copy ) || + ( copyIsArray = Array.isArray( copy ) ) ) ) { + + if ( copyIsArray ) { + copyIsArray = false; + clone = src && Array.isArray( src ) ? src : []; + + } else { + clone = src && jQuery.isPlainObject( src ) ? src : {}; + } + + // Never move original objects, clone them + target[ name ] = jQuery.extend( deep, clone, copy ); + + // Don't bring in undefined values + } else if ( copy !== undefined ) { + target[ name ] = copy; + } + } + } + } + + // Return the modified object + return target; +}; + +jQuery.extend( { + + // Unique for each copy of jQuery on the page + expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ), + + // Assume jQuery is ready without the ready module + isReady: true, + + error: function( msg ) { + throw new Error( msg ); + }, + + noop: function() {}, + + isFunction: function( obj ) { + return jQuery.type( obj ) === "function"; + }, + + isWindow: function( obj ) { + return obj != null && obj === obj.window; + }, + + isNumeric: function( obj ) { + + // As of jQuery 3.0, isNumeric is limited to + // strings and numbers (primitives or objects) + // that can be coerced to finite numbers (gh-2662) + var type = jQuery.type( obj ); + return ( type === "number" || type === "string" ) && + + // parseFloat NaNs numeric-cast false positives ("") + // ...but misinterprets leading-number strings, particularly hex literals ("0x...") + // subtraction forces infinities to NaN + !isNaN( obj - parseFloat( obj ) ); + }, + + isPlainObject: function( obj ) { + var proto, Ctor; + + // Detect obvious negatives + // Use toString instead of jQuery.type to catch host objects + if ( !obj || toString.call( obj ) !== "[object Object]" ) { + return false; + } + + proto = getProto( obj ); + + // Objects with no prototype (e.g., `Object.create( null )`) are plain + if ( !proto ) { + return true; + } + + // Objects with prototype are plain iff they were constructed by a global Object function + Ctor = hasOwn.call( proto, "constructor" ) && proto.constructor; + return typeof Ctor === "function" && fnToString.call( Ctor ) === ObjectFunctionString; + }, + + isEmptyObject: function( obj ) { + + /* eslint-disable no-unused-vars */ + // See https://github.com/eslint/eslint/issues/6125 + var name; + + for ( name in obj ) { + return false; + } + return true; + }, + + type: function( obj ) { + if ( obj == null ) { + return obj + ""; + } + + // Support: Android <=2.3 only (functionish RegExp) + 
return typeof obj === "object" || typeof obj === "function" ? + class2type[ toString.call( obj ) ] || "object" : + typeof obj; + }, + + // Evaluates a script in a global context + globalEval: function( code ) { + DOMEval( code ); + }, + + // Convert dashed to camelCase; used by the css and data modules + // Support: IE <=9 - 11, Edge 12 - 13 + // Microsoft forgot to hump their vendor prefix (#9572) + camelCase: function( string ) { + return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase ); + }, + + each: function( obj, callback ) { + var length, i = 0; + + if ( isArrayLike( obj ) ) { + length = obj.length; + for ( ; i < length; i++ ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } else { + for ( i in obj ) { + if ( callback.call( obj[ i ], i, obj[ i ] ) === false ) { + break; + } + } + } + + return obj; + }, + + // Support: Android <=4.0 only + trim: function( text ) { + return text == null ? + "" : + ( text + "" ).replace( rtrim, "" ); + }, + + // results is for internal usage only + makeArray: function( arr, results ) { + var ret = results || []; + + if ( arr != null ) { + if ( isArrayLike( Object( arr ) ) ) { + jQuery.merge( ret, + typeof arr === "string" ? + [ arr ] : arr + ); + } else { + push.call( ret, arr ); + } + } + + return ret; + }, + + inArray: function( elem, arr, i ) { + return arr == null ? -1 : indexOf.call( arr, elem, i ); + }, + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + merge: function( first, second ) { + var len = +second.length, + j = 0, + i = first.length; + + for ( ; j < len; j++ ) { + first[ i++ ] = second[ j ]; + } + + first.length = i; + + return first; + }, + + grep: function( elems, callback, invert ) { + var callbackInverse, + matches = [], + i = 0, + length = elems.length, + callbackExpect = !invert; + + // Go through the array, only saving the items + // that pass the validator function + for ( ; i < length; i++ ) { + callbackInverse = !callback( elems[ i ], i ); + if ( callbackInverse !== callbackExpect ) { + matches.push( elems[ i ] ); + } + } + + return matches; + }, + + // arg is for internal usage only + map: function( elems, callback, arg ) { + var length, value, + i = 0, + ret = []; + + // Go through the array, translating each of the items to their new values + if ( isArrayLike( elems ) ) { + length = elems.length; + for ( ; i < length; i++ ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + + // Go through every key on the object, + } else { + for ( i in elems ) { + value = callback( elems[ i ], i, arg ); + + if ( value != null ) { + ret.push( value ); + } + } + } + + // Flatten any nested arrays + return concat.apply( [], ret ); + }, + + // A global GUID counter for objects + guid: 1, + + // Bind a function to a context, optionally partially applying any + // arguments. + proxy: function( fn, context ) { + var tmp, args, proxy; + + if ( typeof context === "string" ) { + tmp = fn[ context ]; + context = fn; + fn = tmp; + } + + // Quick check to determine if target is callable, in the spec + // this throws a TypeError, but we will just return undefined. 
+ if ( !jQuery.isFunction( fn ) ) { + return undefined; + } + + // Simulated bind + args = slice.call( arguments, 2 ); + proxy = function() { + return fn.apply( context || this, args.concat( slice.call( arguments ) ) ); + }; + + // Set the guid of unique handler to the same of original handler, so it can be removed + proxy.guid = fn.guid = fn.guid || jQuery.guid++; + + return proxy; + }, + + now: Date.now, + + // jQuery.support is not used in Core but other projects attach their + // properties to it so it needs to exist. + support: support +} ); + +if ( typeof Symbol === "function" ) { + jQuery.fn[ Symbol.iterator ] = arr[ Symbol.iterator ]; +} + +// Populate the class2type map +jQuery.each( "Boolean Number String Function Array Date RegExp Object Error Symbol".split( " " ), +function( i, name ) { + class2type[ "[object " + name + "]" ] = name.toLowerCase(); +} ); + +function isArrayLike( obj ) { + + // Support: real iOS 8.2 only (not reproducible in simulator) + // `in` check used to prevent JIT error (gh-2145) + // hasOwn isn't used here due to false negatives + // regarding Nodelist length in IE + var length = !!obj && "length" in obj && obj.length, + type = jQuery.type( obj ); + + if ( type === "function" || jQuery.isWindow( obj ) ) { + return false; + } + + return type === "array" || length === 0 || + typeof length === "number" && length > 0 && ( length - 1 ) in obj; +} +var Sizzle = +/*! + * Sizzle CSS Selector Engine v2.3.3 + * https://sizzlejs.com/ + * + * Copyright jQuery Foundation and other contributors + * Released under the MIT license + * http://jquery.org/license + * + * Date: 2016-08-08 + */ +(function( window ) { + +var i, + support, + Expr, + getText, + isXML, + tokenize, + compile, + select, + outermostContext, + sortInput, + hasDuplicate, + + // Local document vars + setDocument, + document, + docElem, + documentIsHTML, + rbuggyQSA, + rbuggyMatches, + matches, + contains, + + // Instance-specific data + expando = "sizzle" + 1 * new Date(), + preferredDoc = window.document, + dirruns = 0, + done = 0, + classCache = createCache(), + tokenCache = createCache(), + compilerCache = createCache(), + sortOrder = function( a, b ) { + if ( a === b ) { + hasDuplicate = true; + } + return 0; + }, + + // Instance methods + hasOwn = ({}).hasOwnProperty, + arr = [], + pop = arr.pop, + push_native = arr.push, + push = arr.push, + slice = arr.slice, + // Use a stripped-down indexOf as it's faster than native + // https://jsperf.com/thor-indexof-vs-for/5 + indexOf = function( list, elem ) { + var i = 0, + len = list.length; + for ( ; i < len; i++ ) { + if ( list[i] === elem ) { + return i; + } + } + return -1; + }, + + booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped", + + // Regular expressions + + // http://www.w3.org/TR/css3-selectors/#whitespace + whitespace = "[\\x20\\t\\r\\n\\f]", + + // http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier + identifier = "(?:\\\\.|[\\w-]|[^\0-\\xa0])+", + + // Attribute selectors: http://www.w3.org/TR/selectors/#attribute-selectors + attributes = "\\[" + whitespace + "*(" + identifier + ")(?:" + whitespace + + // Operator (capture 2) + "*([*^$|!~]?=)" + whitespace + + // "Attribute values must be CSS identifiers [capture 5] or strings [capture 3 or capture 4]" + "*(?:'((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\"|(" + identifier + "))|)" + whitespace + + "*\\]", + + pseudos = ":(" + identifier + ")(?:\\((" + + // To reduce the number of selectors 
needing tokenize in the preFilter, prefer arguments: + // 1. quoted (capture 3; capture 4 or capture 5) + "('((?:\\\\.|[^\\\\'])*)'|\"((?:\\\\.|[^\\\\\"])*)\")|" + + // 2. simple (capture 6) + "((?:\\\\.|[^\\\\()[\\]]|" + attributes + ")*)|" + + // 3. anything else (capture 2) + ".*" + + ")\\)|)", + + // Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter + rwhitespace = new RegExp( whitespace + "+", "g" ), + rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ), + + rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ), + rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ), + + rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*?)" + whitespace + "*\\]", "g" ), + + rpseudo = new RegExp( pseudos ), + ridentifier = new RegExp( "^" + identifier + "$" ), + + matchExpr = { + "ID": new RegExp( "^#(" + identifier + ")" ), + "CLASS": new RegExp( "^\\.(" + identifier + ")" ), + "TAG": new RegExp( "^(" + identifier + "|[*])" ), + "ATTR": new RegExp( "^" + attributes ), + "PSEUDO": new RegExp( "^" + pseudos ), + "CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace + + "*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace + + "*(\\d+)|))" + whitespace + "*\\)|)", "i" ), + "bool": new RegExp( "^(?:" + booleans + ")$", "i" ), + // For use in libraries implementing .is() + // We use this for POS matching in `select` + "needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" + + whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" ) + }, + + rinputs = /^(?:input|select|textarea|button)$/i, + rheader = /^h\d$/i, + + rnative = /^[^{]+\{\s*\[native \w/, + + // Easily-parseable/retrievable ID or TAG or CLASS selectors + rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/, + + rsibling = /[+~]/, + + // CSS escapes + // http://www.w3.org/TR/CSS21/syndata.html#escaped-characters + runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ), + funescape = function( _, escaped, escapedWhitespace ) { + var high = "0x" + escaped - 0x10000; + // NaN means non-codepoint + // Support: Firefox<24 + // Workaround erroneous numeric interpretation of +"0x" + return high !== high || escapedWhitespace ? + escaped : + high < 0 ? 
+ // BMP codepoint + String.fromCharCode( high + 0x10000 ) : + // Supplemental Plane codepoint (surrogate pair) + String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 ); + }, + + // CSS string/identifier serialization + // https://drafts.csswg.org/cssom/#common-serializing-idioms + rcssescape = /([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g, + fcssescape = function( ch, asCodePoint ) { + if ( asCodePoint ) { + + // U+0000 NULL becomes U+FFFD REPLACEMENT CHARACTER + if ( ch === "\0" ) { + return "\uFFFD"; + } + + // Control characters and (dependent upon position) numbers get escaped as code points + return ch.slice( 0, -1 ) + "\\" + ch.charCodeAt( ch.length - 1 ).toString( 16 ) + " "; + } + + // Other potentially-special ASCII characters get backslash-escaped + return "\\" + ch; + }, + + // Used for iframes + // See setDocument() + // Removing the function wrapper causes a "Permission Denied" + // error in IE + unloadHandler = function() { + setDocument(); + }, + + disabledAncestor = addCombinator( + function( elem ) { + return elem.disabled === true && ("form" in elem || "label" in elem); + }, + { dir: "parentNode", next: "legend" } + ); + +// Optimize for push.apply( _, NodeList ) +try { + push.apply( + (arr = slice.call( preferredDoc.childNodes )), + preferredDoc.childNodes + ); + // Support: Android<4.0 + // Detect silently failing push.apply + arr[ preferredDoc.childNodes.length ].nodeType; +} catch ( e ) { + push = { apply: arr.length ? + + // Leverage slice if possible + function( target, els ) { + push_native.apply( target, slice.call(els) ); + } : + + // Support: IE<9 + // Otherwise append directly + function( target, els ) { + var j = target.length, + i = 0; + // Can't trust NodeList.length + while ( (target[j++] = els[i++]) ) {} + target.length = j - 1; + } + }; +} + +function Sizzle( selector, context, results, seed ) { + var m, i, elem, nid, match, groups, newSelector, + newContext = context && context.ownerDocument, + + // nodeType defaults to 9, since context defaults to document + nodeType = context ? context.nodeType : 9; + + results = results || []; + + // Return early from calls with invalid selector or context + if ( typeof selector !== "string" || !selector || + nodeType !== 1 && nodeType !== 9 && nodeType !== 11 ) { + + return results; + } + + // Try to shortcut find operations (as opposed to filters) in HTML documents + if ( !seed ) { + + if ( ( context ? 
context.ownerDocument || context : preferredDoc ) !== document ) { + setDocument( context ); + } + context = context || document; + + if ( documentIsHTML ) { + + // If the selector is sufficiently simple, try using a "get*By*" DOM method + // (excepting DocumentFragment context, where the methods don't exist) + if ( nodeType !== 11 && (match = rquickExpr.exec( selector )) ) { + + // ID selector + if ( (m = match[1]) ) { + + // Document context + if ( nodeType === 9 ) { + if ( (elem = context.getElementById( m )) ) { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( elem.id === m ) { + results.push( elem ); + return results; + } + } else { + return results; + } + + // Element context + } else { + + // Support: IE, Opera, Webkit + // TODO: identify versions + // getElementById can match elements by name instead of ID + if ( newContext && (elem = newContext.getElementById( m )) && + contains( context, elem ) && + elem.id === m ) { + + results.push( elem ); + return results; + } + } + + // Type selector + } else if ( match[2] ) { + push.apply( results, context.getElementsByTagName( selector ) ); + return results; + + // Class selector + } else if ( (m = match[3]) && support.getElementsByClassName && + context.getElementsByClassName ) { + + push.apply( results, context.getElementsByClassName( m ) ); + return results; + } + } + + // Take advantage of querySelectorAll + if ( support.qsa && + !compilerCache[ selector + " " ] && + (!rbuggyQSA || !rbuggyQSA.test( selector )) ) { + + if ( nodeType !== 1 ) { + newContext = context; + newSelector = selector; + + // qSA looks outside Element context, which is not what we want + // Thanks to Andrew Dupont for this workaround technique + // Support: IE <=8 + // Exclude object elements + } else if ( context.nodeName.toLowerCase() !== "object" ) { + + // Capture the context ID, setting it first if necessary + if ( (nid = context.getAttribute( "id" )) ) { + nid = nid.replace( rcssescape, fcssescape ); + } else { + context.setAttribute( "id", (nid = expando) ); + } + + // Prefix every selector in the list + groups = tokenize( selector ); + i = groups.length; + while ( i-- ) { + groups[i] = "#" + nid + " " + toSelector( groups[i] ); + } + newSelector = groups.join( "," ); + + // Expand context for sibling selectors + newContext = rsibling.test( selector ) && testContext( context.parentNode ) || + context; + } + + if ( newSelector ) { + try { + push.apply( results, + newContext.querySelectorAll( newSelector ) + ); + return results; + } catch ( qsaError ) { + } finally { + if ( nid === expando ) { + context.removeAttribute( "id" ); + } + } + } + } + } + } + + // All others + return select( selector.replace( rtrim, "$1" ), context, results, seed ); +} + +/** + * Create key-value caches of limited size + * @returns {function(string, object)} Returns the Object data after storing it on itself with + * property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength) + * deleting the oldest entry + */ +function createCache() { + var keys = []; + + function cache( key, value ) { + // Use (key + " ") to avoid collision with native prototype properties (see Issue #157) + if ( keys.push( key + " " ) > Expr.cacheLength ) { + // Only keep the most recent entries + delete cache[ keys.shift() ]; + } + return (cache[ key + " " ] = value); + } + return cache; +} + +/** + * Mark a function for special use by Sizzle + * @param {Function} fn The function to mark + */ 
+function markFunction( fn ) { + fn[ expando ] = true; + return fn; +} + +/** + * Support testing using an element + * @param {Function} fn Passed the created element and returns a boolean result + */ +function assert( fn ) { + var el = document.createElement("fieldset"); + + try { + return !!fn( el ); + } catch (e) { + return false; + } finally { + // Remove from its parent by default + if ( el.parentNode ) { + el.parentNode.removeChild( el ); + } + // release memory in IE + el = null; + } +} + +/** + * Adds the same handler for all of the specified attrs + * @param {String} attrs Pipe-separated list of attributes + * @param {Function} handler The method that will be applied + */ +function addHandle( attrs, handler ) { + var arr = attrs.split("|"), + i = arr.length; + + while ( i-- ) { + Expr.attrHandle[ arr[i] ] = handler; + } +} + +/** + * Checks document order of two siblings + * @param {Element} a + * @param {Element} b + * @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b + */ +function siblingCheck( a, b ) { + var cur = b && a, + diff = cur && a.nodeType === 1 && b.nodeType === 1 && + a.sourceIndex - b.sourceIndex; + + // Use IE sourceIndex if available on both nodes + if ( diff ) { + return diff; + } + + // Check if b follows a + if ( cur ) { + while ( (cur = cur.nextSibling) ) { + if ( cur === b ) { + return -1; + } + } + } + + return a ? 1 : -1; +} + +/** + * Returns a function to use in pseudos for input types + * @param {String} type + */ +function createInputPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for buttons + * @param {String} type + */ +function createButtonPseudo( type ) { + return function( elem ) { + var name = elem.nodeName.toLowerCase(); + return (name === "input" || name === "button") && elem.type === type; + }; +} + +/** + * Returns a function to use in pseudos for :enabled/:disabled + * @param {Boolean} disabled true for :disabled; false for :enabled + */ +function createDisabledPseudo( disabled ) { + + // Known :disabled false positives: fieldset[disabled] > legend:nth-of-type(n+2) :can-disable + return function( elem ) { + + // Only certain elements can match :enabled or :disabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-enabled + // https://html.spec.whatwg.org/multipage/scripting.html#selector-disabled + if ( "form" in elem ) { + + // Check for inherited disabledness on relevant non-disabled elements: + // * listed form-associated elements in a disabled fieldset + // https://html.spec.whatwg.org/multipage/forms.html#category-listed + // https://html.spec.whatwg.org/multipage/forms.html#concept-fe-disabled + // * option elements in a disabled optgroup + // https://html.spec.whatwg.org/multipage/forms.html#concept-option-disabled + // All such elements have a "form" property. 
+ if ( elem.parentNode && elem.disabled === false ) { + + // Option elements defer to a parent optgroup if present + if ( "label" in elem ) { + if ( "label" in elem.parentNode ) { + return elem.parentNode.disabled === disabled; + } else { + return elem.disabled === disabled; + } + } + + // Support: IE 6 - 11 + // Use the isDisabled shortcut property to check for disabled fieldset ancestors + return elem.isDisabled === disabled || + + // Where there is no isDisabled, check manually + /* jshint -W018 */ + elem.isDisabled !== !disabled && + disabledAncestor( elem ) === disabled; + } + + return elem.disabled === disabled; + + // Try to winnow out elements that can't be disabled before trusting the disabled property. + // Some victims get caught in our net (label, legend, menu, track), but it shouldn't + // even exist on them, let alone have a boolean value. + } else if ( "label" in elem ) { + return elem.disabled === disabled; + } + + // Remaining elements are neither :enabled nor :disabled + return false; + }; +} + +/** + * Returns a function to use in pseudos for positionals + * @param {Function} fn + */ +function createPositionalPseudo( fn ) { + return markFunction(function( argument ) { + argument = +argument; + return markFunction(function( seed, matches ) { + var j, + matchIndexes = fn( [], seed.length, argument ), + i = matchIndexes.length; + + // Match elements found at the specified indexes + while ( i-- ) { + if ( seed[ (j = matchIndexes[i]) ] ) { + seed[j] = !(matches[j] = seed[j]); + } + } + }); + }); +} + +/** + * Checks a node for validity as a Sizzle context + * @param {Element|Object=} context + * @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value + */ +function testContext( context ) { + return context && typeof context.getElementsByTagName !== "undefined" && context; +} + +// Expose support vars for convenience +support = Sizzle.support = {}; + +/** + * Detects XML nodes + * @param {Element|Object} elem An element or a document + * @returns {Boolean} True iff elem is a non-HTML XML node + */ +isXML = Sizzle.isXML = function( elem ) { + // documentElement is verified for cases where it doesn't yet exist + // (such as loading iframes in IE - #4833) + var documentElement = elem && (elem.ownerDocument || elem).documentElement; + return documentElement ? documentElement.nodeName !== "HTML" : false; +}; + +/** + * Sets document-related variables once based on the current document + * @param {Element|Object} [doc] An element or document object to use to set the document + * @returns {Object} Returns the current document + */ +setDocument = Sizzle.setDocument = function( node ) { + var hasCompare, subWindow, + doc = node ? 
node.ownerDocument || node : preferredDoc; + + // Return early if doc is invalid or already selected + if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) { + return document; + } + + // Update global variables + document = doc; + docElem = document.documentElement; + documentIsHTML = !isXML( document ); + + // Support: IE 9-11, Edge + // Accessing iframe documents after unload throws "permission denied" errors (jQuery #13936) + if ( preferredDoc !== document && + (subWindow = document.defaultView) && subWindow.top !== subWindow ) { + + // Support: IE 11, Edge + if ( subWindow.addEventListener ) { + subWindow.addEventListener( "unload", unloadHandler, false ); + + // Support: IE 9 - 10 only + } else if ( subWindow.attachEvent ) { + subWindow.attachEvent( "onunload", unloadHandler ); + } + } + + /* Attributes + ---------------------------------------------------------------------- */ + + // Support: IE<8 + // Verify that getAttribute really returns attributes and not properties + // (excepting IE8 booleans) + support.attributes = assert(function( el ) { + el.className = "i"; + return !el.getAttribute("className"); + }); + + /* getElement(s)By* + ---------------------------------------------------------------------- */ + + // Check if getElementsByTagName("*") returns only elements + support.getElementsByTagName = assert(function( el ) { + el.appendChild( document.createComment("") ); + return !el.getElementsByTagName("*").length; + }); + + // Support: IE<9 + support.getElementsByClassName = rnative.test( document.getElementsByClassName ); + + // Support: IE<10 + // Check if getElementById returns elements by name + // The broken getElementById methods don't pick up programmatically-set names, + // so use a roundabout getElementsByName test + support.getById = assert(function( el ) { + docElem.appendChild( el ).id = expando; + return !document.getElementsByName || !document.getElementsByName( expando ).length; + }); + + // ID filter and find + if ( support.getById ) { + Expr.filter["ID"] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + return elem.getAttribute("id") === attrId; + }; + }; + Expr.find["ID"] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var elem = context.getElementById( id ); + return elem ? [ elem ] : []; + } + }; + } else { + Expr.filter["ID"] = function( id ) { + var attrId = id.replace( runescape, funescape ); + return function( elem ) { + var node = typeof elem.getAttributeNode !== "undefined" && + elem.getAttributeNode("id"); + return node && node.value === attrId; + }; + }; + + // Support: IE 6 - 7 only + // getElementById is not reliable as a find shortcut + Expr.find["ID"] = function( id, context ) { + if ( typeof context.getElementById !== "undefined" && documentIsHTML ) { + var node, i, elems, + elem = context.getElementById( id ); + + if ( elem ) { + + // Verify the id attribute + node = elem.getAttributeNode("id"); + if ( node && node.value === id ) { + return [ elem ]; + } + + // Fall back on getElementsByName + elems = context.getElementsByName( id ); + i = 0; + while ( (elem = elems[i++]) ) { + node = elem.getAttributeNode("id"); + if ( node && node.value === id ) { + return [ elem ]; + } + } + } + + return []; + } + }; + } + + // Tag + Expr.find["TAG"] = support.getElementsByTagName ? 
+ function( tag, context ) {
+ if ( typeof context.getElementsByTagName !== "undefined" ) {
+ return context.getElementsByTagName( tag );
+
+ // DocumentFragment nodes don't have gEBTN
+ } else if ( support.qsa ) {
+ return context.querySelectorAll( tag );
+ }
+ } :
+
+ function( tag, context ) {
+ var elem,
+ tmp = [],
+ i = 0,
+ // By happy coincidence, a (broken) gEBTN appears on DocumentFragment nodes too
+ results = context.getElementsByTagName( tag );
+
+ // Filter out possible comments
+ if ( tag === "*" ) {
+ while ( (elem = results[i++]) ) {
+ if ( elem.nodeType === 1 ) {
+ tmp.push( elem );
+ }
+ }
+
+ return tmp;
+ }
+ return results;
+ };
+
+ // Class
+ Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) {
+ if ( typeof context.getElementsByClassName !== "undefined" && documentIsHTML ) {
+ return context.getElementsByClassName( className );
+ }
+ };
+
+ /* QSA/matchesSelector
+ ---------------------------------------------------------------------- */
+
+ // QSA and matchesSelector support
+
+ // matchesSelector(:active) reports false when true (IE9/Opera 11.5)
+ rbuggyMatches = [];
+
+ // qSa(:focus) reports false when true (Chrome 21)
+ // We allow this because of a bug in IE8/9 that throws an error
+ // whenever `document.activeElement` is accessed on an iframe
+ // So, we allow :focus to pass through QSA all the time to avoid the IE error
+ // See https://bugs.jquery.com/ticket/13378
+ rbuggyQSA = [];
+
+ if ( (support.qsa = rnative.test( document.querySelectorAll )) ) {
+ // Build QSA regex
+ // Regex strategy adopted from Diego Perini
+ assert(function( el ) {
+ // Select is set to empty string on purpose
+ // This is to test IE's treatment of not explicitly
+ // setting a boolean content attribute,
+ // since its presence should be enough
+ // https://bugs.jquery.com/ticket/12359
+ docElem.appendChild( el ).innerHTML = "<a id='" + expando + "'></a>" +
+ "<select id='" + expando + "-\r\\' msallowcapture=''>" +
+ "<option selected=''></option></select>";
+
+ // Support: IE8, Opera 11-12.16
+ // Nothing should be selected when empty strings follow ^= or $= or *=
+ // The test attribute must be unknown in Opera but "safe" for WinRT
+ // https://msdn.microsoft.com/en-us/library/ie/hh465388.aspx#attribute_section
+ if ( el.querySelectorAll("[msallowcapture^='']").length ) {
+ rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" );
+ }
+
+ // Support: IE8
+ // Boolean attributes and "value" are not treated correctly
+ if ( !el.querySelectorAll("[selected]").length ) {
+ rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" );
+ }
+
+ // Support: Chrome<29, Android<4.4, Safari<7.0+, iOS<7.0+, PhantomJS<1.9.8+
+ if ( !el.querySelectorAll( "[id~=" + expando + "-]" ).length ) {
+ rbuggyQSA.push("~=");
+ }
+
+ // Webkit/Opera - :checked should return selected option elements
+ // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
+ // IE8 throws error here and will not see later tests
+ if ( !el.querySelectorAll(":checked").length ) {
+ rbuggyQSA.push(":checked");
+ }
+
+ // Support: Safari 8+, iOS 8+
+ // https://bugs.webkit.org/show_bug.cgi?id=136851
+ // In-page `selector#id sibling-combinator selector` fails
+ if ( !el.querySelectorAll( "a#" + expando + "+*" ).length ) {
+ rbuggyQSA.push(".#.+[+~]");
+ }
+ });
+
+ assert(function( el ) {
+ el.innerHTML = "<a href='' disabled='disabled'></a>" +
+ "<select disabled='disabled'><option/></select>";
+
+ // Support: Windows 8 Native Apps
+ // The type and name attributes are restricted during .innerHTML assignment
+ var input = document.createElement("input");
+ input.setAttribute( "type", "hidden" );
+ el.appendChild( input ).setAttribute( "name", "D" );
+
+ // Support: IE8
+
// Enforce case-sensitivity of name attribute + if ( el.querySelectorAll("[name=d]").length ) { + rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" ); + } + + // FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled) + // IE8 throws error here and will not see later tests + if ( el.querySelectorAll(":enabled").length !== 2 ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Support: IE9-11+ + // IE's :disabled selector does not pick up the children of disabled fieldsets + docElem.appendChild( el ).disabled = true; + if ( el.querySelectorAll(":disabled").length !== 2 ) { + rbuggyQSA.push( ":enabled", ":disabled" ); + } + + // Opera 10-11 does not throw on post-comma invalid pseudos + el.querySelectorAll("*,:x"); + rbuggyQSA.push(",.*:"); + }); + } + + if ( (support.matchesSelector = rnative.test( (matches = docElem.matches || + docElem.webkitMatchesSelector || + docElem.mozMatchesSelector || + docElem.oMatchesSelector || + docElem.msMatchesSelector) )) ) { + + assert(function( el ) { + // Check to see if it's possible to do matchesSelector + // on a disconnected node (IE 9) + support.disconnectedMatch = matches.call( el, "*" ); + + // This should fail with an exception + // Gecko does not error, returns false instead + matches.call( el, "[s!='']:x" ); + rbuggyMatches.push( "!=", pseudos ); + }); + } + + rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") ); + rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") ); + + /* Contains + ---------------------------------------------------------------------- */ + hasCompare = rnative.test( docElem.compareDocumentPosition ); + + // Element contains another + // Purposefully self-exclusive + // As in, an element does not contain itself + contains = hasCompare || rnative.test( docElem.contains ) ? + function( a, b ) { + var adown = a.nodeType === 9 ? a.documentElement : a, + bup = b && b.parentNode; + return a === bup || !!( bup && bup.nodeType === 1 && ( + adown.contains ? + adown.contains( bup ) : + a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16 + )); + } : + function( a, b ) { + if ( b ) { + while ( (b = b.parentNode) ) { + if ( b === a ) { + return true; + } + } + } + return false; + }; + + /* Sorting + ---------------------------------------------------------------------- */ + + // Document order sorting + sortOrder = hasCompare ? + function( a, b ) { + + // Flag for duplicate removal + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + // Sort on method existence if only one input has compareDocumentPosition + var compare = !a.compareDocumentPosition - !b.compareDocumentPosition; + if ( compare ) { + return compare; + } + + // Calculate position if both inputs belong to the same document + compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ? + a.compareDocumentPosition( b ) : + + // Otherwise we know they are disconnected + 1; + + // Disconnected nodes + if ( compare & 1 || + (!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) { + + // Choose the first element that is related to our preferred document + if ( a === document || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) { + return -1; + } + if ( b === document || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) { + return 1; + } + + // Maintain original order + return sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + } + + return compare & 4 ? 
-1 : 1; + } : + function( a, b ) { + // Exit early if the nodes are identical + if ( a === b ) { + hasDuplicate = true; + return 0; + } + + var cur, + i = 0, + aup = a.parentNode, + bup = b.parentNode, + ap = [ a ], + bp = [ b ]; + + // Parentless nodes are either documents or disconnected + if ( !aup || !bup ) { + return a === document ? -1 : + b === document ? 1 : + aup ? -1 : + bup ? 1 : + sortInput ? + ( indexOf( sortInput, a ) - indexOf( sortInput, b ) ) : + 0; + + // If the nodes are siblings, we can do a quick check + } else if ( aup === bup ) { + return siblingCheck( a, b ); + } + + // Otherwise we need full lists of their ancestors for comparison + cur = a; + while ( (cur = cur.parentNode) ) { + ap.unshift( cur ); + } + cur = b; + while ( (cur = cur.parentNode) ) { + bp.unshift( cur ); + } + + // Walk down the tree looking for a discrepancy + while ( ap[i] === bp[i] ) { + i++; + } + + return i ? + // Do a sibling check if the nodes have a common ancestor + siblingCheck( ap[i], bp[i] ) : + + // Otherwise nodes in our document sort first + ap[i] === preferredDoc ? -1 : + bp[i] === preferredDoc ? 1 : + 0; + }; + + return document; +}; + +Sizzle.matches = function( expr, elements ) { + return Sizzle( expr, null, null, elements ); +}; + +Sizzle.matchesSelector = function( elem, expr ) { + // Set document vars if needed + if ( ( elem.ownerDocument || elem ) !== document ) { + setDocument( elem ); + } + + // Make sure that attribute selectors are quoted + expr = expr.replace( rattributeQuotes, "='$1']" ); + + if ( support.matchesSelector && documentIsHTML && + !compilerCache[ expr + " " ] && + ( !rbuggyMatches || !rbuggyMatches.test( expr ) ) && + ( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) { + + try { + var ret = matches.call( elem, expr ); + + // IE 9's matchesSelector returns false on disconnected nodes + if ( ret || support.disconnectedMatch || + // As well, disconnected nodes are said to be in a document + // fragment in IE 9 + elem.document && elem.document.nodeType !== 11 ) { + return ret; + } + } catch (e) {} + } + + return Sizzle( expr, document, null, [ elem ] ).length > 0; +}; + +Sizzle.contains = function( context, elem ) { + // Set document vars if needed + if ( ( context.ownerDocument || context ) !== document ) { + setDocument( context ); + } + return contains( context, elem ); +}; + +Sizzle.attr = function( elem, name ) { + // Set document vars if needed + if ( ( elem.ownerDocument || elem ) !== document ) { + setDocument( elem ); + } + + var fn = Expr.attrHandle[ name.toLowerCase() ], + // Don't get fooled by Object.prototype properties (jQuery #13807) + val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ? + fn( elem, name, !documentIsHTML ) : + undefined; + + return val !== undefined ? + val : + support.attributes || !documentIsHTML ? + elem.getAttribute( name ) : + (val = elem.getAttributeNode(name)) && val.specified ? 
+ val.value : + null; +}; + +Sizzle.escape = function( sel ) { + return (sel + "").replace( rcssescape, fcssescape ); +}; + +Sizzle.error = function( msg ) { + throw new Error( "Syntax error, unrecognized expression: " + msg ); +}; + +/** + * Document sorting and removing duplicates + * @param {ArrayLike} results + */ +Sizzle.uniqueSort = function( results ) { + var elem, + duplicates = [], + j = 0, + i = 0; + + // Unless we *know* we can detect duplicates, assume their presence + hasDuplicate = !support.detectDuplicates; + sortInput = !support.sortStable && results.slice( 0 ); + results.sort( sortOrder ); + + if ( hasDuplicate ) { + while ( (elem = results[i++]) ) { + if ( elem === results[ i ] ) { + j = duplicates.push( i ); + } + } + while ( j-- ) { + results.splice( duplicates[ j ], 1 ); + } + } + + // Clear input after sorting to release objects + // See https://github.com/jquery/sizzle/pull/225 + sortInput = null; + + return results; +}; + +/** + * Utility function for retrieving the text value of an array of DOM nodes + * @param {Array|Element} elem + */ +getText = Sizzle.getText = function( elem ) { + var node, + ret = "", + i = 0, + nodeType = elem.nodeType; + + if ( !nodeType ) { + // If no nodeType, this is expected to be an array + while ( (node = elem[i++]) ) { + // Do not traverse comment nodes + ret += getText( node ); + } + } else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) { + // Use textContent for elements + // innerText usage removed for consistency of new lines (jQuery #11153) + if ( typeof elem.textContent === "string" ) { + return elem.textContent; + } else { + // Traverse its children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + ret += getText( elem ); + } + } + } else if ( nodeType === 3 || nodeType === 4 ) { + return elem.nodeValue; + } + // Do not include comment or processing instruction nodes + + return ret; +}; + +Expr = Sizzle.selectors = { + + // Can be adjusted by the user + cacheLength: 50, + + createPseudo: markFunction, + + match: matchExpr, + + attrHandle: {}, + + find: {}, + + relative: { + ">": { dir: "parentNode", first: true }, + " ": { dir: "parentNode" }, + "+": { dir: "previousSibling", first: true }, + "~": { dir: "previousSibling" } + }, + + preFilter: { + "ATTR": function( match ) { + match[1] = match[1].replace( runescape, funescape ); + + // Move the given value to match[3] whether quoted or unquoted + match[3] = ( match[3] || match[4] || match[5] || "" ).replace( runescape, funescape ); + + if ( match[2] === "~=" ) { + match[3] = " " + match[3] + " "; + } + + return match.slice( 0, 4 ); + }, + + "CHILD": function( match ) { + /* matches from matchExpr["CHILD"] + 1 type (only|nth|...) + 2 what (child|of-type) + 3 argument (even|odd|\d*|\d*n([+-]\d+)?|...) + 4 xn-component of xn+y argument ([+-]?\d*n|) + 5 sign of xn-component + 6 x of xn-component + 7 sign of y-component + 8 y of y-component + */ + match[1] = match[1].toLowerCase(); + + if ( match[1].slice( 0, 3 ) === "nth" ) { + // nth-* requires argument + if ( !match[3] ) { + Sizzle.error( match[0] ); + } + + // numeric x and y parameters for Expr.filter.CHILD + // remember that false/true cast respectively to 0/1 + match[4] = +( match[4] ? 
match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) ); + match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" ); + + // other types prohibit arguments + } else if ( match[3] ) { + Sizzle.error( match[0] ); + } + + return match; + }, + + "PSEUDO": function( match ) { + var excess, + unquoted = !match[6] && match[2]; + + if ( matchExpr["CHILD"].test( match[0] ) ) { + return null; + } + + // Accept quoted arguments as-is + if ( match[3] ) { + match[2] = match[4] || match[5] || ""; + + // Strip excess characters from unquoted arguments + } else if ( unquoted && rpseudo.test( unquoted ) && + // Get excess from tokenize (recursively) + (excess = tokenize( unquoted, true )) && + // advance to the next closing parenthesis + (excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) { + + // excess is a negative index + match[0] = match[0].slice( 0, excess ); + match[2] = unquoted.slice( 0, excess ); + } + + // Return only captures needed by the pseudo filter method (type and argument) + return match.slice( 0, 3 ); + } + }, + + filter: { + + "TAG": function( nodeNameSelector ) { + var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase(); + return nodeNameSelector === "*" ? + function() { return true; } : + function( elem ) { + return elem.nodeName && elem.nodeName.toLowerCase() === nodeName; + }; + }, + + "CLASS": function( className ) { + var pattern = classCache[ className + " " ]; + + return pattern || + (pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) && + classCache( className, function( elem ) { + return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== "undefined" && elem.getAttribute("class") || "" ); + }); + }, + + "ATTR": function( name, operator, check ) { + return function( elem ) { + var result = Sizzle.attr( elem, name ); + + if ( result == null ) { + return operator === "!="; + } + if ( !operator ) { + return true; + } + + result += ""; + + return operator === "=" ? result === check : + operator === "!=" ? result !== check : + operator === "^=" ? check && result.indexOf( check ) === 0 : + operator === "*=" ? check && result.indexOf( check ) > -1 : + operator === "$=" ? check && result.slice( -check.length ) === check : + operator === "~=" ? ( " " + result.replace( rwhitespace, " " ) + " " ).indexOf( check ) > -1 : + operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" : + false; + }; + }, + + "CHILD": function( type, what, argument, first, last ) { + var simple = type.slice( 0, 3 ) !== "nth", + forward = type.slice( -4 ) !== "last", + ofType = what === "of-type"; + + return first === 1 && last === 0 ? + + // Shortcut for :nth-*(n) + function( elem ) { + return !!elem.parentNode; + } : + + function( elem, context, xml ) { + var cache, uniqueCache, outerCache, node, nodeIndex, start, + dir = simple !== forward ? "nextSibling" : "previousSibling", + parent = elem.parentNode, + name = ofType && elem.nodeName.toLowerCase(), + useCache = !xml && !ofType, + diff = false; + + if ( parent ) { + + // :(first|last|only)-(child|of-type) + if ( simple ) { + while ( dir ) { + node = elem; + while ( (node = node[ dir ]) ) { + if ( ofType ? 
+ node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) { + + return false; + } + } + // Reverse direction for :only-* (if we haven't yet done so) + start = dir = type === "only" && !start && "nextSibling"; + } + return true; + } + + start = [ forward ? parent.firstChild : parent.lastChild ]; + + // non-xml :nth-child(...) stores cache data on `parent` + if ( forward && useCache ) { + + // Seek `elem` from a previously-cached index + + // ...in a gzip-friendly way + node = parent; + outerCache = node[ expando ] || (node[ expando ] = {}); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + (outerCache[ node.uniqueID ] = {}); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex && cache[ 2 ]; + node = nodeIndex && parent.childNodes[ nodeIndex ]; + + while ( (node = ++nodeIndex && node && node[ dir ] || + + // Fallback to seeking `elem` from the start + (diff = nodeIndex = 0) || start.pop()) ) { + + // When found, cache indexes on `parent` and break + if ( node.nodeType === 1 && ++diff && node === elem ) { + uniqueCache[ type ] = [ dirruns, nodeIndex, diff ]; + break; + } + } + + } else { + // Use previously-cached element index if available + if ( useCache ) { + // ...in a gzip-friendly way + node = elem; + outerCache = node[ expando ] || (node[ expando ] = {}); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + (outerCache[ node.uniqueID ] = {}); + + cache = uniqueCache[ type ] || []; + nodeIndex = cache[ 0 ] === dirruns && cache[ 1 ]; + diff = nodeIndex; + } + + // xml :nth-child(...) + // or :nth-last-child(...) or :nth(-last)?-of-type(...) + if ( diff === false ) { + // Use the same loop as above to seek `elem` from the start + while ( (node = ++nodeIndex && node && node[ dir ] || + (diff = nodeIndex = 0) || start.pop()) ) { + + if ( ( ofType ? + node.nodeName.toLowerCase() === name : + node.nodeType === 1 ) && + ++diff ) { + + // Cache the index of each encountered element + if ( useCache ) { + outerCache = node[ expando ] || (node[ expando ] = {}); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ node.uniqueID ] || + (outerCache[ node.uniqueID ] = {}); + + uniqueCache[ type ] = [ dirruns, diff ]; + } + + if ( node === elem ) { + break; + } + } + } + } + } + + // Incorporate the offset, then check against cycle size + diff -= last; + return diff === first || ( diff % first === 0 && diff / first >= 0 ); + } + }; + }, + + "PSEUDO": function( pseudo, argument ) { + // pseudo-class names are case-insensitive + // http://www.w3.org/TR/selectors/#pseudo-classes + // Prioritize by case sensitivity in case custom pseudos are added with uppercase letters + // Remember that setFilters inherits from pseudos + var args, + fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] || + Sizzle.error( "unsupported pseudo: " + pseudo ); + + // The user may use createPseudo to indicate that + // arguments are needed to create the filter function + // just as Sizzle does + if ( fn[ expando ] ) { + return fn( argument ); + } + + // But maintain support for old signatures + if ( fn.length > 1 ) { + args = [ pseudo, pseudo, "", argument ]; + return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ? 
+ markFunction(function( seed, matches ) { + var idx, + matched = fn( seed, argument ), + i = matched.length; + while ( i-- ) { + idx = indexOf( seed, matched[i] ); + seed[ idx ] = !( matches[ idx ] = matched[i] ); + } + }) : + function( elem ) { + return fn( elem, 0, args ); + }; + } + + return fn; + } + }, + + pseudos: { + // Potentially complex pseudos + "not": markFunction(function( selector ) { + // Trim the selector passed to compile + // to avoid treating leading and trailing + // spaces as combinators + var input = [], + results = [], + matcher = compile( selector.replace( rtrim, "$1" ) ); + + return matcher[ expando ] ? + markFunction(function( seed, matches, context, xml ) { + var elem, + unmatched = matcher( seed, null, xml, [] ), + i = seed.length; + + // Match elements unmatched by `matcher` + while ( i-- ) { + if ( (elem = unmatched[i]) ) { + seed[i] = !(matches[i] = elem); + } + } + }) : + function( elem, context, xml ) { + input[0] = elem; + matcher( input, null, xml, results ); + // Don't keep the element (issue #299) + input[0] = null; + return !results.pop(); + }; + }), + + "has": markFunction(function( selector ) { + return function( elem ) { + return Sizzle( selector, elem ).length > 0; + }; + }), + + "contains": markFunction(function( text ) { + text = text.replace( runescape, funescape ); + return function( elem ) { + return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1; + }; + }), + + // "Whether an element is represented by a :lang() selector + // is based solely on the element's language value + // being equal to the identifier C, + // or beginning with the identifier C immediately followed by "-". + // The matching of C against the element's language value is performed case-insensitively. + // The identifier C does not have to be a valid language name." + // http://www.w3.org/TR/selectors/#lang-pseudo + "lang": markFunction( function( lang ) { + // lang value must be a valid identifier + if ( !ridentifier.test(lang || "") ) { + Sizzle.error( "unsupported lang: " + lang ); + } + lang = lang.replace( runescape, funescape ).toLowerCase(); + return function( elem ) { + var elemLang; + do { + if ( (elemLang = documentIsHTML ? 
+ elem.lang : + elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) { + + elemLang = elemLang.toLowerCase(); + return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0; + } + } while ( (elem = elem.parentNode) && elem.nodeType === 1 ); + return false; + }; + }), + + // Miscellaneous + "target": function( elem ) { + var hash = window.location && window.location.hash; + return hash && hash.slice( 1 ) === elem.id; + }, + + "root": function( elem ) { + return elem === docElem; + }, + + "focus": function( elem ) { + return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex); + }, + + // Boolean properties + "enabled": createDisabledPseudo( false ), + "disabled": createDisabledPseudo( true ), + + "checked": function( elem ) { + // In CSS3, :checked should return both checked and selected elements + // http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked + var nodeName = elem.nodeName.toLowerCase(); + return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected); + }, + + "selected": function( elem ) { + // Accessing this property makes selected-by-default + // options in Safari work properly + if ( elem.parentNode ) { + elem.parentNode.selectedIndex; + } + + return elem.selected === true; + }, + + // Contents + "empty": function( elem ) { + // http://www.w3.org/TR/selectors/#empty-pseudo + // :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5), + // but not by others (comment: 8; processing instruction: 7; etc.) + // nodeType < 6 works because attributes (2) do not appear as children + for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) { + if ( elem.nodeType < 6 ) { + return false; + } + } + return true; + }, + + "parent": function( elem ) { + return !Expr.pseudos["empty"]( elem ); + }, + + // Element/input types + "header": function( elem ) { + return rheader.test( elem.nodeName ); + }, + + "input": function( elem ) { + return rinputs.test( elem.nodeName ); + }, + + "button": function( elem ) { + var name = elem.nodeName.toLowerCase(); + return name === "input" && elem.type === "button" || name === "button"; + }, + + "text": function( elem ) { + var attr; + return elem.nodeName.toLowerCase() === "input" && + elem.type === "text" && + + // Support: IE<8 + // New HTML5 attribute values (e.g., "search") appear with elem.type === "text" + ( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === "text" ); + }, + + // Position-in-collection + "first": createPositionalPseudo(function() { + return [ 0 ]; + }), + + "last": createPositionalPseudo(function( matchIndexes, length ) { + return [ length - 1 ]; + }), + + "eq": createPositionalPseudo(function( matchIndexes, length, argument ) { + return [ argument < 0 ? argument + length : argument ]; + }), + + "even": createPositionalPseudo(function( matchIndexes, length ) { + var i = 0; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + }), + + "odd": createPositionalPseudo(function( matchIndexes, length ) { + var i = 1; + for ( ; i < length; i += 2 ) { + matchIndexes.push( i ); + } + return matchIndexes; + }), + + "lt": createPositionalPseudo(function( matchIndexes, length, argument ) { + var i = argument < 0 ? argument + length : argument; + for ( ; --i >= 0; ) { + matchIndexes.push( i ); + } + return matchIndexes; + }), + + "gt": createPositionalPseudo(function( matchIndexes, length, argument ) { + var i = argument < 0 ? 
argument + length : argument; + for ( ; ++i < length; ) { + matchIndexes.push( i ); + } + return matchIndexes; + }) + } +}; + +Expr.pseudos["nth"] = Expr.pseudos["eq"]; + +// Add button/input type pseudos +for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) { + Expr.pseudos[ i ] = createInputPseudo( i ); +} +for ( i in { submit: true, reset: true } ) { + Expr.pseudos[ i ] = createButtonPseudo( i ); +} + +// Easy API for creating new setFilters +function setFilters() {} +setFilters.prototype = Expr.filters = Expr.pseudos; +Expr.setFilters = new setFilters(); + +tokenize = Sizzle.tokenize = function( selector, parseOnly ) { + var matched, match, tokens, type, + soFar, groups, preFilters, + cached = tokenCache[ selector + " " ]; + + if ( cached ) { + return parseOnly ? 0 : cached.slice( 0 ); + } + + soFar = selector; + groups = []; + preFilters = Expr.preFilter; + + while ( soFar ) { + + // Comma and first run + if ( !matched || (match = rcomma.exec( soFar )) ) { + if ( match ) { + // Don't consume trailing commas as valid + soFar = soFar.slice( match[0].length ) || soFar; + } + groups.push( (tokens = []) ); + } + + matched = false; + + // Combinators + if ( (match = rcombinators.exec( soFar )) ) { + matched = match.shift(); + tokens.push({ + value: matched, + // Cast descendant combinators to space + type: match[0].replace( rtrim, " " ) + }); + soFar = soFar.slice( matched.length ); + } + + // Filters + for ( type in Expr.filter ) { + if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] || + (match = preFilters[ type ]( match ))) ) { + matched = match.shift(); + tokens.push({ + value: matched, + type: type, + matches: match + }); + soFar = soFar.slice( matched.length ); + } + } + + if ( !matched ) { + break; + } + } + + // Return the length of the invalid excess + // if we're just parsing + // Otherwise, throw an error or return tokens + return parseOnly ? + soFar.length : + soFar ? + Sizzle.error( selector ) : + // Cache the tokens + tokenCache( selector, groups ).slice( 0 ); +}; + +function toSelector( tokens ) { + var i = 0, + len = tokens.length, + selector = ""; + for ( ; i < len; i++ ) { + selector += tokens[i].value; + } + return selector; +} + +function addCombinator( matcher, combinator, base ) { + var dir = combinator.dir, + skip = combinator.next, + key = skip || dir, + checkNonElements = base && key === "parentNode", + doneName = done++; + + return combinator.first ? 
+ // Check against closest ancestor/preceding element + function( elem, context, xml ) { + while ( (elem = elem[ dir ]) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + return matcher( elem, context, xml ); + } + } + return false; + } : + + // Check against all ancestor/preceding elements + function( elem, context, xml ) { + var oldCache, uniqueCache, outerCache, + newCache = [ dirruns, doneName ]; + + // We can't set arbitrary data on XML nodes, so they don't benefit from combinator caching + if ( xml ) { + while ( (elem = elem[ dir ]) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + if ( matcher( elem, context, xml ) ) { + return true; + } + } + } + } else { + while ( (elem = elem[ dir ]) ) { + if ( elem.nodeType === 1 || checkNonElements ) { + outerCache = elem[ expando ] || (elem[ expando ] = {}); + + // Support: IE <9 only + // Defend against cloned attroperties (jQuery gh-1709) + uniqueCache = outerCache[ elem.uniqueID ] || (outerCache[ elem.uniqueID ] = {}); + + if ( skip && skip === elem.nodeName.toLowerCase() ) { + elem = elem[ dir ] || elem; + } else if ( (oldCache = uniqueCache[ key ]) && + oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) { + + // Assign to newCache so results back-propagate to previous elements + return (newCache[ 2 ] = oldCache[ 2 ]); + } else { + // Reuse newcache so results back-propagate to previous elements + uniqueCache[ key ] = newCache; + + // A match means we're done; a fail means we have to keep checking + if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) { + return true; + } + } + } + } + } + return false; + }; +} + +function elementMatcher( matchers ) { + return matchers.length > 1 ? + function( elem, context, xml ) { + var i = matchers.length; + while ( i-- ) { + if ( !matchers[i]( elem, context, xml ) ) { + return false; + } + } + return true; + } : + matchers[0]; +} + +function multipleContexts( selector, contexts, results ) { + var i = 0, + len = contexts.length; + for ( ; i < len; i++ ) { + Sizzle( selector, contexts[i], results ); + } + return results; +} + +function condense( unmatched, map, filter, context, xml ) { + var elem, + newUnmatched = [], + i = 0, + len = unmatched.length, + mapped = map != null; + + for ( ; i < len; i++ ) { + if ( (elem = unmatched[i]) ) { + if ( !filter || filter( elem, context, xml ) ) { + newUnmatched.push( elem ); + if ( mapped ) { + map.push( i ); + } + } + } + } + + return newUnmatched; +} + +function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) { + if ( postFilter && !postFilter[ expando ] ) { + postFilter = setMatcher( postFilter ); + } + if ( postFinder && !postFinder[ expando ] ) { + postFinder = setMatcher( postFinder, postSelector ); + } + return markFunction(function( seed, results, context, xml ) { + var temp, i, elem, + preMap = [], + postMap = [], + preexisting = results.length, + + // Get initial elements from seed or context + elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ), + + // Prefilter to get matcher input, preserving a map for seed-results synchronization + matcherIn = preFilter && ( seed || !selector ) ? + condense( elems, preMap, preFilter, context, xml ) : + elems, + + matcherOut = matcher ? + // If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results, + postFinder || ( seed ? preFilter : preexisting || postFilter ) ? 
+ + // ...intermediate processing is necessary + [] : + + // ...otherwise use results directly + results : + matcherIn; + + // Find primary matches + if ( matcher ) { + matcher( matcherIn, matcherOut, context, xml ); + } + + // Apply postFilter + if ( postFilter ) { + temp = condense( matcherOut, postMap ); + postFilter( temp, [], context, xml ); + + // Un-match failing elements by moving them back to matcherIn + i = temp.length; + while ( i-- ) { + if ( (elem = temp[i]) ) { + matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem); + } + } + } + + if ( seed ) { + if ( postFinder || preFilter ) { + if ( postFinder ) { + // Get the final matcherOut by condensing this intermediate into postFinder contexts + temp = []; + i = matcherOut.length; + while ( i-- ) { + if ( (elem = matcherOut[i]) ) { + // Restore matcherIn since elem is not yet a final match + temp.push( (matcherIn[i] = elem) ); + } + } + postFinder( null, (matcherOut = []), temp, xml ); + } + + // Move matched elements from seed to results to keep them synchronized + i = matcherOut.length; + while ( i-- ) { + if ( (elem = matcherOut[i]) && + (temp = postFinder ? indexOf( seed, elem ) : preMap[i]) > -1 ) { + + seed[temp] = !(results[temp] = elem); + } + } + } + + // Add elements to results, through postFinder if defined + } else { + matcherOut = condense( + matcherOut === results ? + matcherOut.splice( preexisting, matcherOut.length ) : + matcherOut + ); + if ( postFinder ) { + postFinder( null, results, matcherOut, xml ); + } else { + push.apply( results, matcherOut ); + } + } + }); +} + +function matcherFromTokens( tokens ) { + var checkContext, matcher, j, + len = tokens.length, + leadingRelative = Expr.relative[ tokens[0].type ], + implicitRelative = leadingRelative || Expr.relative[" "], + i = leadingRelative ? 1 : 0, + + // The foundational matcher ensures that elements are reachable from top-level context(s) + matchContext = addCombinator( function( elem ) { + return elem === checkContext; + }, implicitRelative, true ), + matchAnyContext = addCombinator( function( elem ) { + return indexOf( checkContext, elem ) > -1; + }, implicitRelative, true ), + matchers = [ function( elem, context, xml ) { + var ret = ( !leadingRelative && ( xml || context !== outermostContext ) ) || ( + (checkContext = context).nodeType ? + matchContext( elem, context, xml ) : + matchAnyContext( elem, context, xml ) ); + // Avoid hanging onto element (issue #299) + checkContext = null; + return ret; + } ]; + + for ( ; i < len; i++ ) { + if ( (matcher = Expr.relative[ tokens[i].type ]) ) { + matchers = [ addCombinator(elementMatcher( matchers ), matcher) ]; + } else { + matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches ); + + // Return special upon seeing a positional matcher + if ( matcher[ expando ] ) { + // Find the next relative operator (if any) for proper handling + j = ++i; + for ( ; j < len; j++ ) { + if ( Expr.relative[ tokens[j].type ] ) { + break; + } + } + return setMatcher( + i > 1 && elementMatcher( matchers ), + i > 1 && toSelector( + // If the preceding token was a descendant combinator, insert an implicit any-element `*` + tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? 
"*" : "" }) + ).replace( rtrim, "$1" ), + matcher, + i < j && matcherFromTokens( tokens.slice( i, j ) ), + j < len && matcherFromTokens( (tokens = tokens.slice( j )) ), + j < len && toSelector( tokens ) + ); + } + matchers.push( matcher ); + } + } + + return elementMatcher( matchers ); +} + +function matcherFromGroupMatchers( elementMatchers, setMatchers ) { + var bySet = setMatchers.length > 0, + byElement = elementMatchers.length > 0, + superMatcher = function( seed, context, xml, results, outermost ) { + var elem, j, matcher, + matchedCount = 0, + i = "0", + unmatched = seed && [], + setMatched = [], + contextBackup = outermostContext, + // We must always have either seed elements or outermost context + elems = seed || byElement && Expr.find["TAG"]( "*", outermost ), + // Use integer dirruns iff this is the outermost matcher + dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1), + len = elems.length; + + if ( outermost ) { + outermostContext = context === document || context || outermost; + } + + // Add elements passing elementMatchers directly to results + // Support: IE<9, Safari + // Tolerate NodeList properties (IE: "length"; Safari: ) matching elements by id + for ( ; i !== len && (elem = elems[i]) != null; i++ ) { + if ( byElement && elem ) { + j = 0; + if ( !context && elem.ownerDocument !== document ) { + setDocument( elem ); + xml = !documentIsHTML; + } + while ( (matcher = elementMatchers[j++]) ) { + if ( matcher( elem, context || document, xml) ) { + results.push( elem ); + break; + } + } + if ( outermost ) { + dirruns = dirrunsUnique; + } + } + + // Track unmatched elements for set filters + if ( bySet ) { + // They will have gone through all possible matchers + if ( (elem = !matcher && elem) ) { + matchedCount--; + } + + // Lengthen the array for every element, matched or not + if ( seed ) { + unmatched.push( elem ); + } + } + } + + // `i` is now the count of elements visited above, and adding it to `matchedCount` + // makes the latter nonnegative. + matchedCount += i; + + // Apply set filters to unmatched elements + // NOTE: This can be skipped if there are no unmatched elements (i.e., `matchedCount` + // equals `i`), unless we didn't visit _any_ elements in the above loop because we have + // no element matchers and no seed. + // Incrementing an initially-string "0" `i` allows `i` to remain a string only in that + // case, which will result in a "00" `matchedCount` that differs from `i` but is also + // numerically zero. + if ( bySet && i !== matchedCount ) { + j = 0; + while ( (matcher = setMatchers[j++]) ) { + matcher( unmatched, setMatched, context, xml ); + } + + if ( seed ) { + // Reintegrate element matches to eliminate the need for sorting + if ( matchedCount > 0 ) { + while ( i-- ) { + if ( !(unmatched[i] || setMatched[i]) ) { + setMatched[i] = pop.call( results ); + } + } + } + + // Discard index placeholder values to get only actual matches + setMatched = condense( setMatched ); + } + + // Add matches to results + push.apply( results, setMatched ); + + // Seedless set matches succeeding multiple successful matchers stipulate sorting + if ( outermost && !seed && setMatched.length > 0 && + ( matchedCount + setMatchers.length ) > 1 ) { + + Sizzle.uniqueSort( results ); + } + } + + // Override manipulation of globals by nested matchers + if ( outermost ) { + dirruns = dirrunsUnique; + outermostContext = contextBackup; + } + + return unmatched; + }; + + return bySet ? 
+ markFunction( superMatcher ) : + superMatcher; +} + +compile = Sizzle.compile = function( selector, match /* Internal Use Only */ ) { + var i, + setMatchers = [], + elementMatchers = [], + cached = compilerCache[ selector + " " ]; + + if ( !cached ) { + // Generate a function of recursive functions that can be used to check each element + if ( !match ) { + match = tokenize( selector ); + } + i = match.length; + while ( i-- ) { + cached = matcherFromTokens( match[i] ); + if ( cached[ expando ] ) { + setMatchers.push( cached ); + } else { + elementMatchers.push( cached ); + } + } + + // Cache the compiled function + cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) ); + + // Save selector and tokenization + cached.selector = selector; + } + return cached; +}; + +/** + * A low-level selection function that works with Sizzle's compiled + * selector functions + * @param {String|Function} selector A selector or a pre-compiled + * selector function built with Sizzle.compile + * @param {Element} context + * @param {Array} [results] + * @param {Array} [seed] A set of elements to match against + */ +select = Sizzle.select = function( selector, context, results, seed ) { + var i, tokens, token, type, find, + compiled = typeof selector === "function" && selector, + match = !seed && tokenize( (selector = compiled.selector || selector) ); + + results = results || []; + + // Try to minimize operations if there is only one selector in the list and no seed + // (the latter of which guarantees us context) + if ( match.length === 1 ) { + + // Reduce context if the leading compound selector is an ID + tokens = match[0] = match[0].slice( 0 ); + if ( tokens.length > 2 && (token = tokens[0]).type === "ID" && + context.nodeType === 9 && documentIsHTML && Expr.relative[ tokens[1].type ] ) { + + context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0]; + if ( !context ) { + return results; + + // Precompiled matchers will still verify ancestry, so step up a level + } else if ( compiled ) { + context = context.parentNode; + } + + selector = selector.slice( tokens.shift().value.length ); + } + + // Fetch a seed set for right-to-left matching + i = matchExpr["needsContext"].test( selector ) ? 
0 : tokens.length;
+ while ( i-- ) {
+ token = tokens[i];
+
+ // Abort if we hit a combinator
+ if ( Expr.relative[ (type = token.type) ] ) {
+ break;
+ }
+ if ( (find = Expr.find[ type ]) ) {
+ // Search, expanding context for leading sibling combinators
+ if ( (seed = find(
+ token.matches[0].replace( runescape, funescape ),
+ rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context
+ )) ) {
+
+ // If seed is empty or no tokens remain, we can return early
+ tokens.splice( i, 1 );
+ selector = seed.length && toSelector( tokens );
+ if ( !selector ) {
+ push.apply( results, seed );
+ return results;
+ }
+
+ break;
+ }
+ }
+ }
+ }
+
+ // Compile and execute a filtering function if one is not provided
+ // Provide `match` to avoid retokenization if we modified the selector above
+ ( compiled || compile( selector, match ) )(
+ seed,
+ context,
+ !documentIsHTML,
+ results,
+ !context || rsibling.test( selector ) && testContext( context.parentNode ) || context
+ );
+ return results;
+};
+
+// One-time assignments
+
+// Sort stability
+support.sortStable = expando.split("").sort( sortOrder ).join("") === expando;
+
+// Support: Chrome 14-35+
+// Always assume duplicates if they aren't passed to the comparison function
+support.detectDuplicates = !!hasDuplicate;
+
+// Initialize against the default document
+setDocument();
+
+// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
+// Detached nodes confoundingly follow *each other*
+support.sortDetached = assert(function( el ) {
+ // Should return 1, but returns 4 (following)
+ return el.compareDocumentPosition( document.createElement("fieldset") ) & 1;
+});
+
+// Support: IE<8
+// Prevent attribute/property "interpolation"
+// https://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
+if ( !assert(function( el ) {
+ el.innerHTML = "<a href='#'></a>";
+ return el.firstChild.getAttribute("href") === "#" ;
+}) ) {
+ addHandle( "type|href|height|width", function( elem, name, isXML ) {
+ if ( !isXML ) {
+ return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
+ }
+ });
+}
+
+// Support: IE<9
+// Use defaultValue in place of getAttribute("value")
+if ( !support.attributes || !assert(function( el ) {
+ el.innerHTML = "<input/>";
+ el.firstChild.setAttribute( "value", "" );
+ return el.firstChild.getAttribute( "value" ) === "";
+}) ) {
+ addHandle( "value", function( elem, name, isXML ) {
+ if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
+ return elem.defaultValue;
+ }
+ });
+}
+
+// Support: IE<9
+// Use getAttributeNode to fetch booleans when getAttribute lies
+if ( !assert(function( el ) {
+ return el.getAttribute("disabled") == null;
+}) ) {
+ addHandle( booleans, function( elem, name, isXML ) {
+ var val;
+ if ( !isXML ) {
+ return elem[ name ] === true ? name.toLowerCase() :
+ (val = elem.getAttributeNode( name )) && val.specified ?
+ val.value : + null; + } + }); +} + +return Sizzle; + +})( window ); + + + +jQuery.find = Sizzle; +jQuery.expr = Sizzle.selectors; + +// Deprecated +jQuery.expr[ ":" ] = jQuery.expr.pseudos; +jQuery.uniqueSort = jQuery.unique = Sizzle.uniqueSort; +jQuery.text = Sizzle.getText; +jQuery.isXMLDoc = Sizzle.isXML; +jQuery.contains = Sizzle.contains; +jQuery.escapeSelector = Sizzle.escape; + + + + +var dir = function( elem, dir, until ) { + var matched = [], + truncate = until !== undefined; + + while ( ( elem = elem[ dir ] ) && elem.nodeType !== 9 ) { + if ( elem.nodeType === 1 ) { + if ( truncate && jQuery( elem ).is( until ) ) { + break; + } + matched.push( elem ); + } + } + return matched; +}; + + +var siblings = function( n, elem ) { + var matched = []; + + for ( ; n; n = n.nextSibling ) { + if ( n.nodeType === 1 && n !== elem ) { + matched.push( n ); + } + } + + return matched; +}; + + +var rneedsContext = jQuery.expr.match.needsContext; + + + +function nodeName( elem, name ) { + + return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase(); + +}; +var rsingleTag = ( /^<([a-z][^\/\0>:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i ); + + + +var risSimple = /^.[^:#\[\.,]*$/; + +// Implement the identical functionality for filter and not +function winnow( elements, qualifier, not ) { + if ( jQuery.isFunction( qualifier ) ) { + return jQuery.grep( elements, function( elem, i ) { + return !!qualifier.call( elem, i, elem ) !== not; + } ); + } + + // Single element + if ( qualifier.nodeType ) { + return jQuery.grep( elements, function( elem ) { + return ( elem === qualifier ) !== not; + } ); + } + + // Arraylike of elements (jQuery, arguments, Array) + if ( typeof qualifier !== "string" ) { + return jQuery.grep( elements, function( elem ) { + return ( indexOf.call( qualifier, elem ) > -1 ) !== not; + } ); + } + + // Simple selector that can be filtered directly, removing non-Elements + if ( risSimple.test( qualifier ) ) { + return jQuery.filter( qualifier, elements, not ); + } + + // Complex selector, compare the two sets, removing non-Elements + qualifier = jQuery.filter( qualifier, elements ); + return jQuery.grep( elements, function( elem ) { + return ( indexOf.call( qualifier, elem ) > -1 ) !== not && elem.nodeType === 1; + } ); +} + +jQuery.filter = function( expr, elems, not ) { + var elem = elems[ 0 ]; + + if ( not ) { + expr = ":not(" + expr + ")"; + } + + if ( elems.length === 1 && elem.nodeType === 1 ) { + return jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : []; + } + + return jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) { + return elem.nodeType === 1; + } ) ); +}; + +jQuery.fn.extend( { + find: function( selector ) { + var i, ret, + len = this.length, + self = this; + + if ( typeof selector !== "string" ) { + return this.pushStack( jQuery( selector ).filter( function() { + for ( i = 0; i < len; i++ ) { + if ( jQuery.contains( self[ i ], this ) ) { + return true; + } + } + } ) ); + } + + ret = this.pushStack( [] ); + + for ( i = 0; i < len; i++ ) { + jQuery.find( selector, self[ i ], ret ); + } + + return len > 1 ? 
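+ // Illustrative sketch, not part of the original source: winnow() above is
+ // what .filter(), .not() and .is() (defined just below) delegate to, e.g.:
+ //
+ //   jQuery( "li" ).filter( ".active" );     // keep elements matching ".active"
+ //   jQuery( "li" ).not( ":first-child" );   // drop elements matching the selector
+ //   jQuery( "li" ).is( ".active" );         // true if at least one li matches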
jQuery.uniqueSort( ret ) : ret; + }, + filter: function( selector ) { + return this.pushStack( winnow( this, selector || [], false ) ); + }, + not: function( selector ) { + return this.pushStack( winnow( this, selector || [], true ) ); + }, + is: function( selector ) { + return !!winnow( + this, + + // If this is a positional/relative selector, check membership in the returned set + // so $("p:first").is("p:last") won't return true for a doc with two "p". + typeof selector === "string" && rneedsContext.test( selector ) ? + jQuery( selector ) : + selector || [], + false + ).length; + } +} ); + + +// Initialize a jQuery object + + +// A central reference to the root jQuery(document) +var rootjQuery, + + // A simple way to check for HTML strings + // Prioritize #id over to avoid XSS via location.hash (#9521) + // Strict HTML recognition (#11290: must start with <) + // Shortcut simple #id case for speed + rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]+))$/, + + init = jQuery.fn.init = function( selector, context, root ) { + var match, elem; + + // HANDLE: $(""), $(null), $(undefined), $(false) + if ( !selector ) { + return this; + } + + // Method init() accepts an alternate rootjQuery + // so migrate can support jQuery.sub (gh-2101) + root = root || rootjQuery; + + // Handle HTML strings + if ( typeof selector === "string" ) { + if ( selector[ 0 ] === "<" && + selector[ selector.length - 1 ] === ">" && + selector.length >= 3 ) { + + // Assume that strings that start and end with <> are HTML and skip the regex check + match = [ null, selector, null ]; + + } else { + match = rquickExpr.exec( selector ); + } + + // Match html or make sure no context is specified for #id + if ( match && ( match[ 1 ] || !context ) ) { + + // HANDLE: $(html) -> $(array) + if ( match[ 1 ] ) { + context = context instanceof jQuery ? context[ 0 ] : context; + + // Option to run scripts is true for back-compat + // Intentionally let the error be thrown if parseHTML is not present + jQuery.merge( this, jQuery.parseHTML( + match[ 1 ], + context && context.nodeType ? context.ownerDocument || context : document, + true + ) ); + + // HANDLE: $(html, props) + if ( rsingleTag.test( match[ 1 ] ) && jQuery.isPlainObject( context ) ) { + for ( match in context ) { + + // Properties of context are called as methods if possible + if ( jQuery.isFunction( this[ match ] ) ) { + this[ match ]( context[ match ] ); + + // ...and otherwise set as attributes + } else { + this.attr( match, context[ match ] ); + } + } + } + + return this; + + // HANDLE: $(#id) + } else { + elem = document.getElementById( match[ 2 ] ); + + if ( elem ) { + + // Inject the element directly into the jQuery object + this[ 0 ] = elem; + this.length = 1; + } + return this; + } + + // HANDLE: $(expr, $(...)) + } else if ( !context || context.jquery ) { + return ( context || root ).find( selector ); + + // HANDLE: $(expr, context) + // (which is just equivalent to: $(context).find(expr) + } else { + return this.constructor( context ).find( selector ); + } + + // HANDLE: $(DOMElement) + } else if ( selector.nodeType ) { + this[ 0 ] = selector; + this.length = 1; + return this; + + // HANDLE: $(function) + // Shortcut for document ready + } else if ( jQuery.isFunction( selector ) ) { + return root.ready !== undefined ? 
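+ // Illustrative sketch, not part of the original source: the branches of
+ // init() above and below correspond to the usual jQuery() call forms:
+ //
+ //   jQuery( "<p>hi</p>" );           // HTML string, parsed via parseHTML
+ //   jQuery( "#main" );               // bare #id, getElementById fast path
+ //   jQuery( ".item", container );    // same as jQuery( container ).find( ".item" )
+ //   jQuery( document.body );         // wrap an existing DOM element
+ //   jQuery( function() {} );         // run on DOM ready (the branch right here)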
+ root.ready( selector ) : + + // Execute immediately if ready is not present + selector( jQuery ); + } + + return jQuery.makeArray( selector, this ); + }; + +// Give the init function the jQuery prototype for later instantiation +init.prototype = jQuery.fn; + +// Initialize central reference +rootjQuery = jQuery( document ); + + +var rparentsprev = /^(?:parents|prev(?:Until|All))/, + + // Methods guaranteed to produce a unique set when starting from a unique set + guaranteedUnique = { + children: true, + contents: true, + next: true, + prev: true + }; + +jQuery.fn.extend( { + has: function( target ) { + var targets = jQuery( target, this ), + l = targets.length; + + return this.filter( function() { + var i = 0; + for ( ; i < l; i++ ) { + if ( jQuery.contains( this, targets[ i ] ) ) { + return true; + } + } + } ); + }, + + closest: function( selectors, context ) { + var cur, + i = 0, + l = this.length, + matched = [], + targets = typeof selectors !== "string" && jQuery( selectors ); + + // Positional selectors never match, since there's no _selection_ context + if ( !rneedsContext.test( selectors ) ) { + for ( ; i < l; i++ ) { + for ( cur = this[ i ]; cur && cur !== context; cur = cur.parentNode ) { + + // Always skip document fragments + if ( cur.nodeType < 11 && ( targets ? + targets.index( cur ) > -1 : + + // Don't pass non-elements to Sizzle + cur.nodeType === 1 && + jQuery.find.matchesSelector( cur, selectors ) ) ) { + + matched.push( cur ); + break; + } + } + } + } + + return this.pushStack( matched.length > 1 ? jQuery.uniqueSort( matched ) : matched ); + }, + + // Determine the position of an element within the set + index: function( elem ) { + + // No argument, return index in parent + if ( !elem ) { + return ( this[ 0 ] && this[ 0 ].parentNode ) ? this.first().prevAll().length : -1; + } + + // Index in selector + if ( typeof elem === "string" ) { + return indexOf.call( jQuery( elem ), this[ 0 ] ); + } + + // Locate the position of the desired element + return indexOf.call( this, + + // If it receives a jQuery object, the first element is used + elem.jquery ? elem[ 0 ] : elem + ); + }, + + add: function( selector, context ) { + return this.pushStack( + jQuery.uniqueSort( + jQuery.merge( this.get(), jQuery( selector, context ) ) + ) + ); + }, + + addBack: function( selector ) { + return this.add( selector == null ? + this.prevObject : this.prevObject.filter( selector ) + ); + } +} ); + +function sibling( cur, dir ) { + while ( ( cur = cur[ dir ] ) && cur.nodeType !== 1 ) {} + return cur; +} + +jQuery.each( { + parent: function( elem ) { + var parent = elem.parentNode; + return parent && parent.nodeType !== 11 ? 
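+ // Illustrative sketch, not part of the original source: each function in
+ // this map becomes a jQuery.fn traversal method via the callback at the
+ // end of this jQuery.each() call, e.g. (for some element el):
+ //
+ //   jQuery( el ).parent();             // direct parent (document fragments excluded)
+ //   jQuery( el ).parents( "div" );     // all ancestor divs
+ //   jQuery( el ).nextUntil( ".sep" );  // following siblings up to ".sep"
+ //   jQuery( el ).siblings();           // all other children of el's parent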
parent : null; + }, + parents: function( elem ) { + return dir( elem, "parentNode" ); + }, + parentsUntil: function( elem, i, until ) { + return dir( elem, "parentNode", until ); + }, + next: function( elem ) { + return sibling( elem, "nextSibling" ); + }, + prev: function( elem ) { + return sibling( elem, "previousSibling" ); + }, + nextAll: function( elem ) { + return dir( elem, "nextSibling" ); + }, + prevAll: function( elem ) { + return dir( elem, "previousSibling" ); + }, + nextUntil: function( elem, i, until ) { + return dir( elem, "nextSibling", until ); + }, + prevUntil: function( elem, i, until ) { + return dir( elem, "previousSibling", until ); + }, + siblings: function( elem ) { + return siblings( ( elem.parentNode || {} ).firstChild, elem ); + }, + children: function( elem ) { + return siblings( elem.firstChild ); + }, + contents: function( elem ) { + if ( nodeName( elem, "iframe" ) ) { + return elem.contentDocument; + } + + // Support: IE 9 - 11 only, iOS 7 only, Android Browser <=4.3 only + // Treat the template element as a regular one in browsers that + // don't support it. + if ( nodeName( elem, "template" ) ) { + elem = elem.content || elem; + } + + return jQuery.merge( [], elem.childNodes ); + } +}, function( name, fn ) { + jQuery.fn[ name ] = function( until, selector ) { + var matched = jQuery.map( this, fn, until ); + + if ( name.slice( -5 ) !== "Until" ) { + selector = until; + } + + if ( selector && typeof selector === "string" ) { + matched = jQuery.filter( selector, matched ); + } + + if ( this.length > 1 ) { + + // Remove duplicates + if ( !guaranteedUnique[ name ] ) { + jQuery.uniqueSort( matched ); + } + + // Reverse order for parents* and prev-derivatives + if ( rparentsprev.test( name ) ) { + matched.reverse(); + } + } + + return this.pushStack( matched ); + }; +} ); +var rnothtmlwhite = ( /[^\x20\t\r\n\f]+/g ); + + + +// Convert String-formatted options into Object-formatted ones +function createOptions( options ) { + var object = {}; + jQuery.each( options.match( rnothtmlwhite ) || [], function( _, flag ) { + object[ flag ] = true; + } ); + return object; +} + +/* + * Create a callback list using the following parameters: + * + * options: an optional list of space-separated options that will change how + * the callback list behaves or a more traditional option object + * + * By default a callback list will act like an event callback list and can be + * "fired" multiple times. + * + * Possible options: + * + * once: will ensure the callback list can only be fired once (like a Deferred) + * + * memory: will keep track of previous values and will call any callback added + * after the list has been fired right away with the latest "memorized" + * values (like a Deferred) + * + * unique: will ensure a callback can only be added once (no duplicate in the list) + * + * stopOnFalse: interrupt callings when a callback returns false + * + */ +jQuery.Callbacks = function( options ) { + + // Convert options from String-formatted to Object-formatted if needed + // (we check in cache first) + options = typeof options === "string" ? 
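+ // Illustrative sketch, not part of the original source, of how the flags
+ // documented above combine (public Callbacks API only):
+ //
+ //   var cb = jQuery.Callbacks( "once memory" );
+ //   cb.add( function( v ) { console.log( "first:", v ); } );
+ //   cb.fire( "hello" );       // logs "first: hello"
+ //   cb.add( function( v ) { console.log( "late:", v ); } );
+ //                             // "memory": logs "late: hello" immediately
+ //   cb.fire( "again" );       // ignored, "once" has locked the list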
+ createOptions( options ) : + jQuery.extend( {}, options ); + + var // Flag to know if list is currently firing + firing, + + // Last fire value for non-forgettable lists + memory, + + // Flag to know if list was already fired + fired, + + // Flag to prevent firing + locked, + + // Actual callback list + list = [], + + // Queue of execution data for repeatable lists + queue = [], + + // Index of currently firing callback (modified by add/remove as needed) + firingIndex = -1, + + // Fire callbacks + fire = function() { + + // Enforce single-firing + locked = locked || options.once; + + // Execute callbacks for all pending executions, + // respecting firingIndex overrides and runtime changes + fired = firing = true; + for ( ; queue.length; firingIndex = -1 ) { + memory = queue.shift(); + while ( ++firingIndex < list.length ) { + + // Run callback and check for early termination + if ( list[ firingIndex ].apply( memory[ 0 ], memory[ 1 ] ) === false && + options.stopOnFalse ) { + + // Jump to end and forget the data so .add doesn't re-fire + firingIndex = list.length; + memory = false; + } + } + } + + // Forget the data if we're done with it + if ( !options.memory ) { + memory = false; + } + + firing = false; + + // Clean up if we're done firing for good + if ( locked ) { + + // Keep an empty list if we have data for future add calls + if ( memory ) { + list = []; + + // Otherwise, this object is spent + } else { + list = ""; + } + } + }, + + // Actual Callbacks object + self = { + + // Add a callback or a collection of callbacks to the list + add: function() { + if ( list ) { + + // If we have memory from a past run, we should fire after adding + if ( memory && !firing ) { + firingIndex = list.length - 1; + queue.push( memory ); + } + + ( function add( args ) { + jQuery.each( args, function( _, arg ) { + if ( jQuery.isFunction( arg ) ) { + if ( !options.unique || !self.has( arg ) ) { + list.push( arg ); + } + } else if ( arg && arg.length && jQuery.type( arg ) !== "string" ) { + + // Inspect recursively + add( arg ); + } + } ); + } )( arguments ); + + if ( memory && !firing ) { + fire(); + } + } + return this; + }, + + // Remove a callback from the list + remove: function() { + jQuery.each( arguments, function( _, arg ) { + var index; + while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) { + list.splice( index, 1 ); + + // Handle firing indexes + if ( index <= firingIndex ) { + firingIndex--; + } + } + } ); + return this; + }, + + // Check if a given callback is in the list. + // If no argument is given, return whether or not list has callbacks attached. + has: function( fn ) { + return fn ? + jQuery.inArray( fn, list ) > -1 : + list.length > 0; + }, + + // Remove all callbacks from the list + empty: function() { + if ( list ) { + list = []; + } + return this; + }, + + // Disable .fire and .add + // Abort any current/pending executions + // Clear all callbacks and values + disable: function() { + locked = queue = []; + list = memory = ""; + return this; + }, + disabled: function() { + return !list; + }, + + // Disable .fire + // Also disable .add unless we have memory (since it would have no effect) + // Abort any pending executions + lock: function() { + locked = queue = []; + if ( !memory && !firing ) { + list = memory = ""; + } + return this; + }, + locked: function() { + return !!locked; + }, + + // Call all callbacks with the given context and arguments + fireWith: function( context, args ) { + if ( !locked ) { + args = args || []; + args = [ context, args.slice ? 
args.slice() : args ]; + queue.push( args ); + if ( !firing ) { + fire(); + } + } + return this; + }, + + // Call all the callbacks with the given arguments + fire: function() { + self.fireWith( this, arguments ); + return this; + }, + + // To know if the callbacks have already been called at least once + fired: function() { + return !!fired; + } + }; + + return self; +}; + + +function Identity( v ) { + return v; +} +function Thrower( ex ) { + throw ex; +} + +function adoptValue( value, resolve, reject, noValue ) { + var method; + + try { + + // Check for promise aspect first to privilege synchronous behavior + if ( value && jQuery.isFunction( ( method = value.promise ) ) ) { + method.call( value ).done( resolve ).fail( reject ); + + // Other thenables + } else if ( value && jQuery.isFunction( ( method = value.then ) ) ) { + method.call( value, resolve, reject ); + + // Other non-thenables + } else { + + // Control `resolve` arguments by letting Array#slice cast boolean `noValue` to integer: + // * false: [ value ].slice( 0 ) => resolve( value ) + // * true: [ value ].slice( 1 ) => resolve() + resolve.apply( undefined, [ value ].slice( noValue ) ); + } + + // For Promises/A+, convert exceptions into rejections + // Since jQuery.when doesn't unwrap thenables, we can skip the extra checks appearing in + // Deferred#then to conditionally suppress rejection. + } catch ( value ) { + + // Support: Android 4.0 only + // Strict mode functions invoked without .call/.apply get global-object context + reject.apply( undefined, [ value ] ); + } +} + +jQuery.extend( { + + Deferred: function( func ) { + var tuples = [ + + // action, add listener, callbacks, + // ... .then handlers, argument index, [final state] + [ "notify", "progress", jQuery.Callbacks( "memory" ), + jQuery.Callbacks( "memory" ), 2 ], + [ "resolve", "done", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 0, "resolved" ], + [ "reject", "fail", jQuery.Callbacks( "once memory" ), + jQuery.Callbacks( "once memory" ), 1, "rejected" ] + ], + state = "pending", + promise = { + state: function() { + return state; + }, + always: function() { + deferred.done( arguments ).fail( arguments ); + return this; + }, + "catch": function( fn ) { + return promise.then( null, fn ); + }, + + // Keep pipe for back-compat + pipe: function( /* fnDone, fnFail, fnProgress */ ) { + var fns = arguments; + + return jQuery.Deferred( function( newDefer ) { + jQuery.each( tuples, function( i, tuple ) { + + // Map tuples (progress, done, fail) to arguments (done, fail, progress) + var fn = jQuery.isFunction( fns[ tuple[ 4 ] ] ) && fns[ tuple[ 4 ] ]; + + // deferred.progress(function() { bind to newDefer or newDefer.notify }) + // deferred.done(function() { bind to newDefer or newDefer.resolve }) + // deferred.fail(function() { bind to newDefer or newDefer.reject }) + deferred[ tuple[ 1 ] ]( function() { + var returned = fn && fn.apply( this, arguments ); + if ( returned && jQuery.isFunction( returned.promise ) ) { + returned.promise() + .progress( newDefer.notify ) + .done( newDefer.resolve ) + .fail( newDefer.reject ); + } else { + newDefer[ tuple[ 0 ] + "With" ]( + this, + fn ? 
[ returned ] : arguments + ); + } + } ); + } ); + fns = null; + } ).promise(); + }, + then: function( onFulfilled, onRejected, onProgress ) { + var maxDepth = 0; + function resolve( depth, deferred, handler, special ) { + return function() { + var that = this, + args = arguments, + mightThrow = function() { + var returned, then; + + // Support: Promises/A+ section 2.3.3.3.3 + // https://promisesaplus.com/#point-59 + // Ignore double-resolution attempts + if ( depth < maxDepth ) { + return; + } + + returned = handler.apply( that, args ); + + // Support: Promises/A+ section 2.3.1 + // https://promisesaplus.com/#point-48 + if ( returned === deferred.promise() ) { + throw new TypeError( "Thenable self-resolution" ); + } + + // Support: Promises/A+ sections 2.3.3.1, 3.5 + // https://promisesaplus.com/#point-54 + // https://promisesaplus.com/#point-75 + // Retrieve `then` only once + then = returned && + + // Support: Promises/A+ section 2.3.4 + // https://promisesaplus.com/#point-64 + // Only check objects and functions for thenability + ( typeof returned === "object" || + typeof returned === "function" ) && + returned.then; + + // Handle a returned thenable + if ( jQuery.isFunction( then ) ) { + + // Special processors (notify) just wait for resolution + if ( special ) { + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ) + ); + + // Normal processors (resolve) also hook into progress + } else { + + // ...and disregard older resolution values + maxDepth++; + + then.call( + returned, + resolve( maxDepth, deferred, Identity, special ), + resolve( maxDepth, deferred, Thrower, special ), + resolve( maxDepth, deferred, Identity, + deferred.notifyWith ) + ); + } + + // Handle all other returned values + } else { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Identity ) { + that = undefined; + args = [ returned ]; + } + + // Process the value(s) + // Default process is resolve + ( special || deferred.resolveWith )( that, args ); + } + }, + + // Only normal processors (resolve) catch and reject exceptions + process = special ? + mightThrow : + function() { + try { + mightThrow(); + } catch ( e ) { + + if ( jQuery.Deferred.exceptionHook ) { + jQuery.Deferred.exceptionHook( e, + process.stackTrace ); + } + + // Support: Promises/A+ section 2.3.3.3.4.1 + // https://promisesaplus.com/#point-61 + // Ignore post-resolution exceptions + if ( depth + 1 >= maxDepth ) { + + // Only substitute handlers pass on context + // and multiple values (non-spec behavior) + if ( handler !== Thrower ) { + that = undefined; + args = [ e ]; + } + + deferred.rejectWith( that, args ); + } + } + }; + + // Support: Promises/A+ section 2.3.3.3.1 + // https://promisesaplus.com/#point-57 + // Re-resolve promises immediately to dodge false rejection from + // subsequent errors + if ( depth ) { + process(); + } else { + + // Call an optional hook to record the stack, in case of exception + // since it's otherwise lost when execution goes async + if ( jQuery.Deferred.getStackHook ) { + process.stackTrace = jQuery.Deferred.getStackHook(); + } + window.setTimeout( process ); + } + }; + } + + return jQuery.Deferred( function( newDefer ) { + + // progress_handlers.add( ... ) + tuples[ 0 ][ 3 ].add( + resolve( + 0, + newDefer, + jQuery.isFunction( onProgress ) ? + onProgress : + Identity, + newDefer.notifyWith + ) + ); + + // fulfilled_handlers.add( ... 
) + tuples[ 1 ][ 3 ].add( + resolve( + 0, + newDefer, + jQuery.isFunction( onFulfilled ) ? + onFulfilled : + Identity + ) + ); + + // rejected_handlers.add( ... ) + tuples[ 2 ][ 3 ].add( + resolve( + 0, + newDefer, + jQuery.isFunction( onRejected ) ? + onRejected : + Thrower + ) + ); + } ).promise(); + }, + + // Get a promise for this deferred + // If obj is provided, the promise aspect is added to the object + promise: function( obj ) { + return obj != null ? jQuery.extend( obj, promise ) : promise; + } + }, + deferred = {}; + + // Add list-specific methods + jQuery.each( tuples, function( i, tuple ) { + var list = tuple[ 2 ], + stateString = tuple[ 5 ]; + + // promise.progress = list.add + // promise.done = list.add + // promise.fail = list.add + promise[ tuple[ 1 ] ] = list.add; + + // Handle state + if ( stateString ) { + list.add( + function() { + + // state = "resolved" (i.e., fulfilled) + // state = "rejected" + state = stateString; + }, + + // rejected_callbacks.disable + // fulfilled_callbacks.disable + tuples[ 3 - i ][ 2 ].disable, + + // progress_callbacks.lock + tuples[ 0 ][ 2 ].lock + ); + } + + // progress_handlers.fire + // fulfilled_handlers.fire + // rejected_handlers.fire + list.add( tuple[ 3 ].fire ); + + // deferred.notify = function() { deferred.notifyWith(...) } + // deferred.resolve = function() { deferred.resolveWith(...) } + // deferred.reject = function() { deferred.rejectWith(...) } + deferred[ tuple[ 0 ] ] = function() { + deferred[ tuple[ 0 ] + "With" ]( this === deferred ? undefined : this, arguments ); + return this; + }; + + // deferred.notifyWith = list.fireWith + // deferred.resolveWith = list.fireWith + // deferred.rejectWith = list.fireWith + deferred[ tuple[ 0 ] + "With" ] = list.fireWith; + } ); + + // Make the deferred a promise + promise.promise( deferred ); + + // Call given func if any + if ( func ) { + func.call( deferred, deferred ); + } + + // All done! + return deferred; + }, + + // Deferred helper + when: function( singleValue ) { + var + + // count of uncompleted subordinates + remaining = arguments.length, + + // count of unprocessed arguments + i = remaining, + + // subordinate fulfillment data + resolveContexts = Array( i ), + resolveValues = slice.call( arguments ), + + // the master Deferred + master = jQuery.Deferred(), + + // subordinate callback factory + updateFunc = function( i ) { + return function( value ) { + resolveContexts[ i ] = this; + resolveValues[ i ] = arguments.length > 1 ? slice.call( arguments ) : value; + if ( !( --remaining ) ) { + master.resolveWith( resolveContexts, resolveValues ); + } + }; + }; + + // Single- and empty arguments are adopted like Promise.resolve + if ( remaining <= 1 ) { + adoptValue( singleValue, master.done( updateFunc( i ) ).resolve, master.reject, + !remaining ); + + // Use .then() to unwrap secondary thenables (cf. gh-3000) + if ( master.state() === "pending" || + jQuery.isFunction( resolveValues[ i ] && resolveValues[ i ].then ) ) { + + return master.then(); + } + } + + // Multiple arguments are aggregated like Promise.all array elements + while ( i-- ) { + adoptValue( resolveValues[ i ], updateFunc( i ), master.reject ); + } + + return master.promise(); + } +} ); + + +// These usually indicate a programmer mistake during development, +// warn about them ASAP rather than swallowing them by default. 
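+// Illustrative sketch, not part of the original source: typical use of the
+// Deferred / when machinery implemented above (public API only):
+//
+//   var d1 = jQuery.Deferred(), d2 = jQuery.Deferred();
+//   jQuery.when( d1, d2 ).done( function( a, b ) {
+//       console.log( a, b );   // "one" "two", once both have resolved
+//   } );
+//   d1.resolve( "one" );
+//   d2.resolve( "two" );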
+var rerrorNames = /^(Eval|Internal|Range|Reference|Syntax|Type|URI)Error$/; + +jQuery.Deferred.exceptionHook = function( error, stack ) { + + // Support: IE 8 - 9 only + // Console exists when dev tools are open, which can happen at any time + if ( window.console && window.console.warn && error && rerrorNames.test( error.name ) ) { + window.console.warn( "jQuery.Deferred exception: " + error.message, error.stack, stack ); + } +}; + + + + +jQuery.readyException = function( error ) { + window.setTimeout( function() { + throw error; + } ); +}; + + + + +// The deferred used on DOM ready +var readyList = jQuery.Deferred(); + +jQuery.fn.ready = function( fn ) { + + readyList + .then( fn ) + + // Wrap jQuery.readyException in a function so that the lookup + // happens at the time of error handling instead of callback + // registration. + .catch( function( error ) { + jQuery.readyException( error ); + } ); + + return this; +}; + +jQuery.extend( { + + // Is the DOM ready to be used? Set to true once it occurs. + isReady: false, + + // A counter to track how many items to wait for before + // the ready event fires. See #6781 + readyWait: 1, + + // Handle when the DOM is ready + ready: function( wait ) { + + // Abort if there are pending holds or we're already ready + if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) { + return; + } + + // Remember that the DOM is ready + jQuery.isReady = true; + + // If a normal DOM Ready event fired, decrement, and wait if need be + if ( wait !== true && --jQuery.readyWait > 0 ) { + return; + } + + // If there are functions bound, to execute + readyList.resolveWith( document, [ jQuery ] ); + } +} ); + +jQuery.ready.then = readyList.then; + +// The ready event handler and self cleanup method +function completed() { + document.removeEventListener( "DOMContentLoaded", completed ); + window.removeEventListener( "load", completed ); + jQuery.ready(); +} + +// Catch cases where $(document).ready() is called +// after the browser event has already occurred. +// Support: IE <=9 - 10 only +// Older IE sometimes signals "interactive" too soon +if ( document.readyState === "complete" || + ( document.readyState !== "loading" && !document.documentElement.doScroll ) ) { + + // Handle it asynchronously to allow scripts the opportunity to delay ready + window.setTimeout( jQuery.ready ); + +} else { + + // Use the handy event callback + document.addEventListener( "DOMContentLoaded", completed ); + + // A fallback to window.onload, that will always work + window.addEventListener( "load", completed ); +} + + + + +// Multifunctional method to get and set values of a collection +// The value/s can optionally be executed if it's a function +var access = function( elems, fn, key, value, chainable, emptyGet, raw ) { + var i = 0, + len = elems.length, + bulk = key == null; + + // Sets many values + if ( jQuery.type( key ) === "object" ) { + chainable = true; + for ( i in key ) { + access( elems, fn, i, key[ i ], true, emptyGet, raw ); + } + + // Sets one value + } else if ( value !== undefined ) { + chainable = true; + + if ( !jQuery.isFunction( value ) ) { + raw = true; + } + + if ( bulk ) { + + // Bulk operations run against the entire set + if ( raw ) { + fn.call( elems, value ); + fn = null; + + // ...except when executing function values + } else { + bulk = fn; + fn = function( elem, key, value ) { + return bulk.call( jQuery( elem ), value ); + }; + } + } + + if ( fn ) { + for ( ; i < len; i++ ) { + fn( + elems[ i ], key, raw ? 
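+ // Illustrative sketch, not part of the original source: access() implements
+ // the shared getter/setter pattern used by methods such as .css(), e.g.:
+ //
+ //   jQuery( el ).css( "width" );                            // get
+ //   jQuery( el ).css( "width", "10px" );                    // set one
+ //   jQuery( el ).css( { width: "10px", height: "2em" } );   // set many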
+ value : + value.call( elems[ i ], i, fn( elems[ i ], key ) ) + ); + } + } + } + + if ( chainable ) { + return elems; + } + + // Gets + if ( bulk ) { + return fn.call( elems ); + } + + return len ? fn( elems[ 0 ], key ) : emptyGet; +}; +var acceptData = function( owner ) { + + // Accepts only: + // - Node + // - Node.ELEMENT_NODE + // - Node.DOCUMENT_NODE + // - Object + // - Any + return owner.nodeType === 1 || owner.nodeType === 9 || !( +owner.nodeType ); +}; + + + + +function Data() { + this.expando = jQuery.expando + Data.uid++; +} + +Data.uid = 1; + +Data.prototype = { + + cache: function( owner ) { + + // Check if the owner object already has a cache + var value = owner[ this.expando ]; + + // If not, create one + if ( !value ) { + value = {}; + + // We can accept data for non-element nodes in modern browsers, + // but we should not, see #8335. + // Always return an empty object. + if ( acceptData( owner ) ) { + + // If it is a node unlikely to be stringify-ed or looped over + // use plain assignment + if ( owner.nodeType ) { + owner[ this.expando ] = value; + + // Otherwise secure it in a non-enumerable property + // configurable must be true to allow the property to be + // deleted when data is removed + } else { + Object.defineProperty( owner, this.expando, { + value: value, + configurable: true + } ); + } + } + } + + return value; + }, + set: function( owner, data, value ) { + var prop, + cache = this.cache( owner ); + + // Handle: [ owner, key, value ] args + // Always use camelCase key (gh-2257) + if ( typeof data === "string" ) { + cache[ jQuery.camelCase( data ) ] = value; + + // Handle: [ owner, { properties } ] args + } else { + + // Copy the properties one-by-one to the cache object + for ( prop in data ) { + cache[ jQuery.camelCase( prop ) ] = data[ prop ]; + } + } + return cache; + }, + get: function( owner, key ) { + return key === undefined ? + this.cache( owner ) : + + // Always use camelCase key (gh-2257) + owner[ this.expando ] && owner[ this.expando ][ jQuery.camelCase( key ) ]; + }, + access: function( owner, key, value ) { + + // In cases where either: + // + // 1. No key was specified + // 2. A string key was specified, but no value provided + // + // Take the "read" path and allow the get method to determine + // which value to return, respectively either: + // + // 1. The entire cache object + // 2. The data stored at the key + // + if ( key === undefined || + ( ( key && typeof key === "string" ) && value === undefined ) ) { + + return this.get( owner, key ); + } + + // When the key is not a string, or both a key and value + // are specified, set or extend (existing objects) with either: + // + // 1. An object of properties + // 2. A key and value + // + this.set( owner, key, value ); + + // Since the "set" path can have two possible entry points + // return the expected data based on which path was taken[*] + return value !== undefined ? value : key; + }, + remove: function( owner, key ) { + var i, + cache = owner[ this.expando ]; + + if ( cache === undefined ) { + return; + } + + if ( key !== undefined ) { + + // Support array or space separated string of keys + if ( Array.isArray( key ) ) { + + // If key is an array of keys... + // We always set camelCase keys, so remove that. + key = key.map( jQuery.camelCase ); + } else { + key = jQuery.camelCase( key ); + + // If a key with the spaces exists, use it. + // Otherwise, create an array by matching non-whitespace + key = key in cache ? 
+ [ key ] : + ( key.match( rnothtmlwhite ) || [] ); + } + + i = key.length; + + while ( i-- ) { + delete cache[ key[ i ] ]; + } + } + + // Remove the expando if there's no more data + if ( key === undefined || jQuery.isEmptyObject( cache ) ) { + + // Support: Chrome <=35 - 45 + // Webkit & Blink performance suffers when deleting properties + // from DOM nodes, so set to undefined instead + // https://bugs.chromium.org/p/chromium/issues/detail?id=378607 (bug restricted) + if ( owner.nodeType ) { + owner[ this.expando ] = undefined; + } else { + delete owner[ this.expando ]; + } + } + }, + hasData: function( owner ) { + var cache = owner[ this.expando ]; + return cache !== undefined && !jQuery.isEmptyObject( cache ); + } +}; +var dataPriv = new Data(); + +var dataUser = new Data(); + + + +// Implementation Summary +// +// 1. Enforce API surface and semantic compatibility with 1.9.x branch +// 2. Improve the module's maintainability by reducing the storage +// paths to a single mechanism. +// 3. Use the same single mechanism to support "private" and "user" data. +// 4. _Never_ expose "private" data to user code (TODO: Drop _data, _removeData) +// 5. Avoid exposing implementation details on user objects (eg. expando properties) +// 6. Provide a clear path for implementation upgrade to WeakMap in 2014 + +var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/, + rmultiDash = /[A-Z]/g; + +function getData( data ) { + if ( data === "true" ) { + return true; + } + + if ( data === "false" ) { + return false; + } + + if ( data === "null" ) { + return null; + } + + // Only convert to a number if it doesn't change the string + if ( data === +data + "" ) { + return +data; + } + + if ( rbrace.test( data ) ) { + return JSON.parse( data ); + } + + return data; +} + +function dataAttr( elem, key, data ) { + var name; + + // If nothing was found internally, try to fetch any + // data from the HTML5 data-* attribute + if ( data === undefined && elem.nodeType === 1 ) { + name = "data-" + key.replace( rmultiDash, "-$&" ).toLowerCase(); + data = elem.getAttribute( name ); + + if ( typeof data === "string" ) { + try { + data = getData( data ); + } catch ( e ) {} + + // Make sure we set the data so it isn't changed later + dataUser.set( elem, key, data ); + } else { + data = undefined; + } + } + return data; +} + +jQuery.extend( { + hasData: function( elem ) { + return dataUser.hasData( elem ) || dataPriv.hasData( elem ); + }, + + data: function( elem, name, data ) { + return dataUser.access( elem, name, data ); + }, + + removeData: function( elem, name ) { + dataUser.remove( elem, name ); + }, + + // TODO: Now that all calls to _data and _removeData have been replaced + // with direct calls to dataPriv methods, these can be deprecated. 
+ _data: function( elem, name, data ) { + return dataPriv.access( elem, name, data ); + }, + + _removeData: function( elem, name ) { + dataPriv.remove( elem, name ); + } +} ); + +jQuery.fn.extend( { + data: function( key, value ) { + var i, name, data, + elem = this[ 0 ], + attrs = elem && elem.attributes; + + // Gets all values + if ( key === undefined ) { + if ( this.length ) { + data = dataUser.get( elem ); + + if ( elem.nodeType === 1 && !dataPriv.get( elem, "hasDataAttrs" ) ) { + i = attrs.length; + while ( i-- ) { + + // Support: IE 11 only + // The attrs elements can be null (#14894) + if ( attrs[ i ] ) { + name = attrs[ i ].name; + if ( name.indexOf( "data-" ) === 0 ) { + name = jQuery.camelCase( name.slice( 5 ) ); + dataAttr( elem, name, data[ name ] ); + } + } + } + dataPriv.set( elem, "hasDataAttrs", true ); + } + } + + return data; + } + + // Sets multiple values + if ( typeof key === "object" ) { + return this.each( function() { + dataUser.set( this, key ); + } ); + } + + return access( this, function( value ) { + var data; + + // The calling jQuery object (element matches) is not empty + // (and therefore has an element appears at this[ 0 ]) and the + // `value` parameter was not undefined. An empty jQuery object + // will result in `undefined` for elem = this[ 0 ] which will + // throw an exception if an attempt to read a data cache is made. + if ( elem && value === undefined ) { + + // Attempt to get data from the cache + // The key will always be camelCased in Data + data = dataUser.get( elem, key ); + if ( data !== undefined ) { + return data; + } + + // Attempt to "discover" the data in + // HTML5 custom data-* attrs + data = dataAttr( elem, key ); + if ( data !== undefined ) { + return data; + } + + // We tried really hard, but the data doesn't exist. + return; + } + + // Set the data... 
+ this.each( function() { + + // We always store the camelCased key + dataUser.set( this, key, value ); + } ); + }, null, value, arguments.length > 1, null, true ); + }, + + removeData: function( key ) { + return this.each( function() { + dataUser.remove( this, key ); + } ); + } +} ); + + +jQuery.extend( { + queue: function( elem, type, data ) { + var queue; + + if ( elem ) { + type = ( type || "fx" ) + "queue"; + queue = dataPriv.get( elem, type ); + + // Speed up dequeue by getting out quickly if this is just a lookup + if ( data ) { + if ( !queue || Array.isArray( data ) ) { + queue = dataPriv.access( elem, type, jQuery.makeArray( data ) ); + } else { + queue.push( data ); + } + } + return queue || []; + } + }, + + dequeue: function( elem, type ) { + type = type || "fx"; + + var queue = jQuery.queue( elem, type ), + startLength = queue.length, + fn = queue.shift(), + hooks = jQuery._queueHooks( elem, type ), + next = function() { + jQuery.dequeue( elem, type ); + }; + + // If the fx queue is dequeued, always remove the progress sentinel + if ( fn === "inprogress" ) { + fn = queue.shift(); + startLength--; + } + + if ( fn ) { + + // Add a progress sentinel to prevent the fx queue from being + // automatically dequeued + if ( type === "fx" ) { + queue.unshift( "inprogress" ); + } + + // Clear up the last queue stop function + delete hooks.stop; + fn.call( elem, next, hooks ); + } + + if ( !startLength && hooks ) { + hooks.empty.fire(); + } + }, + + // Not public - generate a queueHooks object, or return the current one + _queueHooks: function( elem, type ) { + var key = type + "queueHooks"; + return dataPriv.get( elem, key ) || dataPriv.access( elem, key, { + empty: jQuery.Callbacks( "once memory" ).add( function() { + dataPriv.remove( elem, [ type + "queue", key ] ); + } ) + } ); + } +} ); + +jQuery.fn.extend( { + queue: function( type, data ) { + var setter = 2; + + if ( typeof type !== "string" ) { + data = type; + type = "fx"; + setter--; + } + + if ( arguments.length < setter ) { + return jQuery.queue( this[ 0 ], type ); + } + + return data === undefined ? 
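+ // Illustrative sketch, not part of the original source: the "fx" queue is
+ // the one animations use; custom queues behave the same but must be
+ // dequeued manually, e.g. (for some element el):
+ //
+ //   jQuery( el )
+ //       .queue( "steps", function( next ) { console.log( "one" ); next(); } )
+ //       .queue( "steps", function( next ) { console.log( "two" ); next(); } )
+ //       .dequeue( "steps" );   // runs "one", then (via next()) "two"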
+ this : + this.each( function() { + var queue = jQuery.queue( this, type, data ); + + // Ensure a hooks for this queue + jQuery._queueHooks( this, type ); + + if ( type === "fx" && queue[ 0 ] !== "inprogress" ) { + jQuery.dequeue( this, type ); + } + } ); + }, + dequeue: function( type ) { + return this.each( function() { + jQuery.dequeue( this, type ); + } ); + }, + clearQueue: function( type ) { + return this.queue( type || "fx", [] ); + }, + + // Get a promise resolved when queues of a certain type + // are emptied (fx is the type by default) + promise: function( type, obj ) { + var tmp, + count = 1, + defer = jQuery.Deferred(), + elements = this, + i = this.length, + resolve = function() { + if ( !( --count ) ) { + defer.resolveWith( elements, [ elements ] ); + } + }; + + if ( typeof type !== "string" ) { + obj = type; + type = undefined; + } + type = type || "fx"; + + while ( i-- ) { + tmp = dataPriv.get( elements[ i ], type + "queueHooks" ); + if ( tmp && tmp.empty ) { + count++; + tmp.empty.add( resolve ); + } + } + resolve(); + return defer.promise( obj ); + } +} ); +var pnum = ( /[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/ ).source; + +var rcssNum = new RegExp( "^(?:([+-])=|)(" + pnum + ")([a-z%]*)$", "i" ); + + +var cssExpand = [ "Top", "Right", "Bottom", "Left" ]; + +var isHiddenWithinTree = function( elem, el ) { + + // isHiddenWithinTree might be called from jQuery#filter function; + // in that case, element will be second argument + elem = el || elem; + + // Inline style trumps all + return elem.style.display === "none" || + elem.style.display === "" && + + // Otherwise, check computed style + // Support: Firefox <=43 - 45 + // Disconnected elements can have computed display: none, so first confirm that elem is + // in the document. + jQuery.contains( elem.ownerDocument, elem ) && + + jQuery.css( elem, "display" ) === "none"; + }; + +var swap = function( elem, options, callback, args ) { + var ret, name, + old = {}; + + // Remember the old values, and insert the new ones + for ( name in options ) { + old[ name ] = elem.style[ name ]; + elem.style[ name ] = options[ name ]; + } + + ret = callback.apply( elem, args || [] ); + + // Revert the old values + for ( name in options ) { + elem.style[ name ] = old[ name ]; + } + + return ret; +}; + + + + +function adjustCSS( elem, prop, valueParts, tween ) { + var adjusted, + scale = 1, + maxIterations = 20, + currentValue = tween ? + function() { + return tween.cur(); + } : + function() { + return jQuery.css( elem, prop, "" ); + }, + initial = currentValue(), + unit = valueParts && valueParts[ 3 ] || ( jQuery.cssNumber[ prop ] ? "" : "px" ), + + // Starting value computation is required for potential unit mismatches + initialInUnit = ( jQuery.cssNumber[ prop ] || unit !== "px" && +initial ) && + rcssNum.exec( jQuery.css( elem, prop ) ); + + if ( initialInUnit && initialInUnit[ 3 ] !== unit ) { + + // Trust units reported by jQuery.css + unit = unit || initialInUnit[ 3 ]; + + // Make sure we update the tween properties later on + valueParts = valueParts || []; + + // Iteratively approximate from a nonzero starting point + initialInUnit = +initial || 1; + + do { + + // If previous iteration zeroed out, double until we get *something*. 
+ // Use string for doubling so we don't accidentally see scale as unchanged below + scale = scale || ".5"; + + // Adjust and apply + initialInUnit = initialInUnit / scale; + jQuery.style( elem, prop, initialInUnit + unit ); + + // Update scale, tolerating zero or NaN from tween.cur() + // Break the loop if scale is unchanged or perfect, or if we've just had enough. + } while ( + scale !== ( scale = currentValue() / initial ) && scale !== 1 && --maxIterations + ); + } + + if ( valueParts ) { + initialInUnit = +initialInUnit || +initial || 0; + + // Apply relative offset (+=/-=) if specified + adjusted = valueParts[ 1 ] ? + initialInUnit + ( valueParts[ 1 ] + 1 ) * valueParts[ 2 ] : + +valueParts[ 2 ]; + if ( tween ) { + tween.unit = unit; + tween.start = initialInUnit; + tween.end = adjusted; + } + } + return adjusted; +} + + +var defaultDisplayMap = {}; + +function getDefaultDisplay( elem ) { + var temp, + doc = elem.ownerDocument, + nodeName = elem.nodeName, + display = defaultDisplayMap[ nodeName ]; + + if ( display ) { + return display; + } + + temp = doc.body.appendChild( doc.createElement( nodeName ) ); + display = jQuery.css( temp, "display" ); + + temp.parentNode.removeChild( temp ); + + if ( display === "none" ) { + display = "block"; + } + defaultDisplayMap[ nodeName ] = display; + + return display; +} + +function showHide( elements, show ) { + var display, elem, + values = [], + index = 0, + length = elements.length; + + // Determine new display value for elements that need to change + for ( ; index < length; index++ ) { + elem = elements[ index ]; + if ( !elem.style ) { + continue; + } + + display = elem.style.display; + if ( show ) { + + // Since we force visibility upon cascade-hidden elements, an immediate (and slow) + // check is required in this first loop unless we have a nonempty display value (either + // inline or about-to-be-restored) + if ( display === "none" ) { + values[ index ] = dataPriv.get( elem, "display" ) || null; + if ( !values[ index ] ) { + elem.style.display = ""; + } + } + if ( elem.style.display === "" && isHiddenWithinTree( elem ) ) { + values[ index ] = getDefaultDisplay( elem ); + } + } else { + if ( display !== "none" ) { + values[ index ] = "none"; + + // Remember what we're overwriting + dataPriv.set( elem, "display", display ); + } + } + } + + // Set the display of the elements in a second loop to avoid constant reflow + for ( index = 0; index < length; index++ ) { + if ( values[ index ] != null ) { + elements[ index ].style.display = values[ index ]; + } + } + + return elements; +} + +jQuery.fn.extend( { + show: function() { + return showHide( this, true ); + }, + hide: function() { + return showHide( this ); + }, + toggle: function( state ) { + if ( typeof state === "boolean" ) { + return state ? this.show() : this.hide(); + } + + return this.each( function() { + if ( isHiddenWithinTree( this ) ) { + jQuery( this ).show(); + } else { + jQuery( this ).hide(); + } + } ); + } +} ); +var rcheckableType = ( /^(?:checkbox|radio)$/i ); + +var rtagName = ( /<([a-z][^\/\0>\x20\t\r\n\f]+)/i ); + +var rscriptType = ( /^$|\/(?:java|ecma)script/i ); + + + +// We have to close these tags to support XHTML (#13200) +var wrapMap = { + + // Support: IE <=9 only + option: [ 1, "" ], + + // XHTML parsers do not magically insert elements in the + // same way that tag soup parsers do. So we cannot shorten + // this by omitting or other required elements. + thead: [ 1, "", "
" ], + col: [ 2, "", "
" ], + tr: [ 2, "", "
" ], + td: [ 3, "", "
" ], + + _default: [ 0, "", "" ] +}; + +// Support: IE <=9 only +wrapMap.optgroup = wrapMap.option; + +wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead; +wrapMap.th = wrapMap.td; + + +function getAll( context, tag ) { + + // Support: IE <=9 - 11 only + // Use typeof to avoid zero-argument method invocation on host objects (#15151) + var ret; + + if ( typeof context.getElementsByTagName !== "undefined" ) { + ret = context.getElementsByTagName( tag || "*" ); + + } else if ( typeof context.querySelectorAll !== "undefined" ) { + ret = context.querySelectorAll( tag || "*" ); + + } else { + ret = []; + } + + if ( tag === undefined || tag && nodeName( context, tag ) ) { + return jQuery.merge( [ context ], ret ); + } + + return ret; +} + + +// Mark scripts as having already been evaluated +function setGlobalEval( elems, refElements ) { + var i = 0, + l = elems.length; + + for ( ; i < l; i++ ) { + dataPriv.set( + elems[ i ], + "globalEval", + !refElements || dataPriv.get( refElements[ i ], "globalEval" ) + ); + } +} + + +var rhtml = /<|&#?\w+;/; + +function buildFragment( elems, context, scripts, selection, ignored ) { + var elem, tmp, tag, wrap, contains, j, + fragment = context.createDocumentFragment(), + nodes = [], + i = 0, + l = elems.length; + + for ( ; i < l; i++ ) { + elem = elems[ i ]; + + if ( elem || elem === 0 ) { + + // Add nodes directly + if ( jQuery.type( elem ) === "object" ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem ); + + // Convert non-html into a text node + } else if ( !rhtml.test( elem ) ) { + nodes.push( context.createTextNode( elem ) ); + + // Convert html into DOM nodes + } else { + tmp = tmp || fragment.appendChild( context.createElement( "div" ) ); + + // Deserialize a standard representation + tag = ( rtagName.exec( elem ) || [ "", "" ] )[ 1 ].toLowerCase(); + wrap = wrapMap[ tag ] || wrapMap._default; + tmp.innerHTML = wrap[ 1 ] + jQuery.htmlPrefilter( elem ) + wrap[ 2 ]; + + // Descend through wrappers to the right content + j = wrap[ 0 ]; + while ( j-- ) { + tmp = tmp.lastChild; + } + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( nodes, tmp.childNodes ); + + // Remember the top-level container + tmp = fragment.firstChild; + + // Ensure the created nodes are orphaned (#12392) + tmp.textContent = ""; + } + } + } + + // Remove wrapper from fragment + fragment.textContent = ""; + + i = 0; + while ( ( elem = nodes[ i++ ] ) ) { + + // Skip elements already in the context collection (trac-4087) + if ( selection && jQuery.inArray( elem, selection ) > -1 ) { + if ( ignored ) { + ignored.push( elem ); + } + continue; + } + + contains = jQuery.contains( elem.ownerDocument, elem ); + + // Append to fragment + tmp = getAll( fragment.appendChild( elem ), "script" ); + + // Preserve script evaluation history + if ( contains ) { + setGlobalEval( tmp ); + } + + // Capture executables + if ( scripts ) { + j = 0; + while ( ( elem = tmp[ j++ ] ) ) { + if ( rscriptType.test( elem.type || "" ) ) { + scripts.push( elem ); + } + } + } + } + + return fragment; +} + + +( function() { + var fragment = document.createDocumentFragment(), + div = fragment.appendChild( document.createElement( "div" ) ), + input = document.createElement( "input" ); + + // Support: Android 4.0 - 4.3 only + // Check state lost if the name is set (#11217) + // Support: Windows Web Apps 
(WWA) + // `name` and `type` must use .setAttribute for WWA (#14901) + input.setAttribute( "type", "radio" ); + input.setAttribute( "checked", "checked" ); + input.setAttribute( "name", "t" ); + + div.appendChild( input ); + + // Support: Android <=4.1 only + // Older WebKit doesn't clone checked state correctly in fragments + support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked; + + // Support: IE <=11 only + // Make sure textarea (and checkbox) defaultValue is properly cloned + div.innerHTML = ""; + support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue; +} )(); +var documentElement = document.documentElement; + + + +var + rkeyEvent = /^key/, + rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, + rtypenamespace = /^([^.]*)(?:\.(.+)|)/; + +function returnTrue() { + return true; +} + +function returnFalse() { + return false; +} + +// Support: IE <=9 only +// See #13393 for more info +function safeActiveElement() { + try { + return document.activeElement; + } catch ( err ) { } +} + +function on( elem, types, selector, data, fn, one ) { + var origFn, type; + + // Types can be a map of types/handlers + if ( typeof types === "object" ) { + + // ( types-Object, selector, data ) + if ( typeof selector !== "string" ) { + + // ( types-Object, data ) + data = data || selector; + selector = undefined; + } + for ( type in types ) { + on( elem, type, selector, data, types[ type ], one ); + } + return elem; + } + + if ( data == null && fn == null ) { + + // ( types, fn ) + fn = selector; + data = selector = undefined; + } else if ( fn == null ) { + if ( typeof selector === "string" ) { + + // ( types, selector, fn ) + fn = data; + data = undefined; + } else { + + // ( types, data, fn ) + fn = data; + data = selector; + selector = undefined; + } + } + if ( fn === false ) { + fn = returnFalse; + } else if ( !fn ) { + return elem; + } + + if ( one === 1 ) { + origFn = fn; + fn = function( event ) { + + // Can use an empty set, since event contains the info + jQuery().off( event ); + return origFn.apply( this, arguments ); + }; + + // Use same guid so caller can remove using origFn + fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); + } + return elem.each( function() { + jQuery.event.add( this, types, fn, data, selector ); + } ); +} + +/* + * Helper functions for managing events -- not part of the public interface. + * Props to Dean Edwards' addEvent library for many of the ideas. 
+ */ +jQuery.event = { + + global: {}, + + add: function( elem, types, handler, data, selector ) { + + var handleObjIn, eventHandle, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.get( elem ); + + // Don't attach events to noData or text/comment nodes (but allow plain objects) + if ( !elemData ) { + return; + } + + // Caller can pass in an object of custom data in lieu of the handler + if ( handler.handler ) { + handleObjIn = handler; + handler = handleObjIn.handler; + selector = handleObjIn.selector; + } + + // Ensure that invalid selectors throw exceptions at attach time + // Evaluate against documentElement in case elem is a non-element node (e.g., document) + if ( selector ) { + jQuery.find.matchesSelector( documentElement, selector ); + } + + // Make sure that the handler has a unique ID, used to find/remove it later + if ( !handler.guid ) { + handler.guid = jQuery.guid++; + } + + // Init the element's event structure and main handler, if this is the first + if ( !( events = elemData.events ) ) { + events = elemData.events = {}; + } + if ( !( eventHandle = elemData.handle ) ) { + eventHandle = elemData.handle = function( e ) { + + // Discard the second event of a jQuery.event.trigger() and + // when an event is called after a page has unloaded + return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? + jQuery.event.dispatch.apply( elem, arguments ) : undefined; + }; + } + + // Handle multiple events separated by a space + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // There *must* be a type, no attaching namespace-only handlers + if ( !type ) { + continue; + } + + // If event changes its type, use the special event handlers for the changed type + special = jQuery.event.special[ type ] || {}; + + // If selector defined, determine special event api type, otherwise given type + type = ( selector ? special.delegateType : special.bindType ) || type; + + // Update special based on newly reset type + special = jQuery.event.special[ type ] || {}; + + // handleObj is passed to all event handlers + handleObj = jQuery.extend( { + type: type, + origType: origType, + data: data, + handler: handler, + guid: handler.guid, + selector: selector, + needsContext: selector && jQuery.expr.match.needsContext.test( selector ), + namespace: namespaces.join( "." 
) + }, handleObjIn ); + + // Init the event handler queue if we're the first + if ( !( handlers = events[ type ] ) ) { + handlers = events[ type ] = []; + handlers.delegateCount = 0; + + // Only use addEventListener if the special events handler returns false + if ( !special.setup || + special.setup.call( elem, data, namespaces, eventHandle ) === false ) { + + if ( elem.addEventListener ) { + elem.addEventListener( type, eventHandle ); + } + } + } + + if ( special.add ) { + special.add.call( elem, handleObj ); + + if ( !handleObj.handler.guid ) { + handleObj.handler.guid = handler.guid; + } + } + + // Add to the element's handler list, delegates in front + if ( selector ) { + handlers.splice( handlers.delegateCount++, 0, handleObj ); + } else { + handlers.push( handleObj ); + } + + // Keep track of which events have ever been used, for event optimization + jQuery.event.global[ type ] = true; + } + + }, + + // Detach an event or set of events from an element + remove: function( elem, types, handler, selector, mappedTypes ) { + + var j, origCount, tmp, + events, t, handleObj, + special, handlers, type, namespaces, origType, + elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); + + if ( !elemData || !( events = elemData.events ) ) { + return; + } + + // Once for each type.namespace in types; type may be omitted + types = ( types || "" ).match( rnothtmlwhite ) || [ "" ]; + t = types.length; + while ( t-- ) { + tmp = rtypenamespace.exec( types[ t ] ) || []; + type = origType = tmp[ 1 ]; + namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); + + // Unbind all events (on this namespace, if provided) for the element + if ( !type ) { + for ( type in events ) { + jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); + } + continue; + } + + special = jQuery.event.special[ type ] || {}; + type = ( selector ? 
special.delegateType : special.bindType ) || type; + handlers = events[ type ] || []; + tmp = tmp[ 2 ] && + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); + + // Remove matching events + origCount = j = handlers.length; + while ( j-- ) { + handleObj = handlers[ j ]; + + if ( ( mappedTypes || origType === handleObj.origType ) && + ( !handler || handler.guid === handleObj.guid ) && + ( !tmp || tmp.test( handleObj.namespace ) ) && + ( !selector || selector === handleObj.selector || + selector === "**" && handleObj.selector ) ) { + handlers.splice( j, 1 ); + + if ( handleObj.selector ) { + handlers.delegateCount--; + } + if ( special.remove ) { + special.remove.call( elem, handleObj ); + } + } + } + + // Remove generic event handler if we removed something and no more handlers exist + // (avoids potential for endless recursion during removal of special event handlers) + if ( origCount && !handlers.length ) { + if ( !special.teardown || + special.teardown.call( elem, namespaces, elemData.handle ) === false ) { + + jQuery.removeEvent( elem, type, elemData.handle ); + } + + delete events[ type ]; + } + } + + // Remove data and the expando if it's no longer used + if ( jQuery.isEmptyObject( events ) ) { + dataPriv.remove( elem, "handle events" ); + } + }, + + dispatch: function( nativeEvent ) { + + // Make a writable jQuery.Event from the native event object + var event = jQuery.event.fix( nativeEvent ); + + var i, j, ret, matched, handleObj, handlerQueue, + args = new Array( arguments.length ), + handlers = ( dataPriv.get( this, "events" ) || {} )[ event.type ] || [], + special = jQuery.event.special[ event.type ] || {}; + + // Use the fix-ed jQuery.Event rather than the (read-only) native event + args[ 0 ] = event; + + for ( i = 1; i < arguments.length; i++ ) { + args[ i ] = arguments[ i ]; + } + + event.delegateTarget = this; + + // Call the preDispatch hook for the mapped type, and let it bail if desired + if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { + return; + } + + // Determine handlers + handlerQueue = jQuery.event.handlers.call( this, event, handlers ); + + // Run delegates first; they may want to stop propagation beneath us + i = 0; + while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { + event.currentTarget = matched.elem; + + j = 0; + while ( ( handleObj = matched.handlers[ j++ ] ) && + !event.isImmediatePropagationStopped() ) { + + // Triggered event must either 1) have no namespace, or 2) have namespace(s) + // a subset or equal to those in the bound event (both can have no namespace). 
+ if ( !event.rnamespace || event.rnamespace.test( handleObj.namespace ) ) { + + event.handleObj = handleObj; + event.data = handleObj.data; + + ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || + handleObj.handler ).apply( matched.elem, args ); + + if ( ret !== undefined ) { + if ( ( event.result = ret ) === false ) { + event.preventDefault(); + event.stopPropagation(); + } + } + } + } + } + + // Call the postDispatch hook for the mapped type + if ( special.postDispatch ) { + special.postDispatch.call( this, event ); + } + + return event.result; + }, + + handlers: function( event, handlers ) { + var i, handleObj, sel, matchedHandlers, matchedSelectors, + handlerQueue = [], + delegateCount = handlers.delegateCount, + cur = event.target; + + // Find delegate handlers + if ( delegateCount && + + // Support: IE <=9 + // Black-hole SVG instance trees (trac-13180) + cur.nodeType && + + // Support: Firefox <=42 + // Suppress spec-violating clicks indicating a non-primary pointer button (trac-3861) + // https://www.w3.org/TR/DOM-Level-3-Events/#event-type-click + // Support: IE 11 only + // ...but not arrow key "clicks" of radio inputs, which can have `button` -1 (gh-2343) + !( event.type === "click" && event.button >= 1 ) ) { + + for ( ; cur !== this; cur = cur.parentNode || this ) { + + // Don't check non-elements (#13208) + // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) + if ( cur.nodeType === 1 && !( event.type === "click" && cur.disabled === true ) ) { + matchedHandlers = []; + matchedSelectors = {}; + for ( i = 0; i < delegateCount; i++ ) { + handleObj = handlers[ i ]; + + // Don't conflict with Object.prototype properties (#13203) + sel = handleObj.selector + " "; + + if ( matchedSelectors[ sel ] === undefined ) { + matchedSelectors[ sel ] = handleObj.needsContext ? + jQuery( sel, this ).index( cur ) > -1 : + jQuery.find( sel, this, null, [ cur ] ).length; + } + if ( matchedSelectors[ sel ] ) { + matchedHandlers.push( handleObj ); + } + } + if ( matchedHandlers.length ) { + handlerQueue.push( { elem: cur, handlers: matchedHandlers } ); + } + } + } + } + + // Add the remaining (directly-bound) handlers + cur = this; + if ( delegateCount < handlers.length ) { + handlerQueue.push( { elem: cur, handlers: handlers.slice( delegateCount ) } ); + } + + return handlerQueue; + }, + + addProp: function( name, hook ) { + Object.defineProperty( jQuery.Event.prototype, name, { + enumerable: true, + configurable: true, + + get: jQuery.isFunction( hook ) ? + function() { + if ( this.originalEvent ) { + return hook( this.originalEvent ); + } + } : + function() { + if ( this.originalEvent ) { + return this.originalEvent[ name ]; + } + }, + + set: function( value ) { + Object.defineProperty( this, name, { + enumerable: true, + configurable: true, + writable: true, + value: value + } ); + } + } ); + }, + + fix: function( originalEvent ) { + return originalEvent[ jQuery.expando ] ? 
+ originalEvent : + new jQuery.Event( originalEvent ); + }, + + special: { + load: { + + // Prevent triggered image.load events from bubbling to window.load + noBubble: true + }, + focus: { + + // Fire native event if possible so blur/focus sequence is correct + trigger: function() { + if ( this !== safeActiveElement() && this.focus ) { + this.focus(); + return false; + } + }, + delegateType: "focusin" + }, + blur: { + trigger: function() { + if ( this === safeActiveElement() && this.blur ) { + this.blur(); + return false; + } + }, + delegateType: "focusout" + }, + click: { + + // For checkbox, fire native event so checked state will be right + trigger: function() { + if ( this.type === "checkbox" && this.click && nodeName( this, "input" ) ) { + this.click(); + return false; + } + }, + + // For cross-browser consistency, don't fire native .click() on links + _default: function( event ) { + return nodeName( event.target, "a" ); + } + }, + + beforeunload: { + postDispatch: function( event ) { + + // Support: Firefox 20+ + // Firefox doesn't alert if the returnValue field is not set. + if ( event.result !== undefined && event.originalEvent ) { + event.originalEvent.returnValue = event.result; + } + } + } + } +}; + +jQuery.removeEvent = function( elem, type, handle ) { + + // This "if" is needed for plain objects + if ( elem.removeEventListener ) { + elem.removeEventListener( type, handle ); + } +}; + +jQuery.Event = function( src, props ) { + + // Allow instantiation without the 'new' keyword + if ( !( this instanceof jQuery.Event ) ) { + return new jQuery.Event( src, props ); + } + + // Event object + if ( src && src.type ) { + this.originalEvent = src; + this.type = src.type; + + // Events bubbling up the document may have been marked as prevented + // by a handler lower down the tree; reflect the correct value. + this.isDefaultPrevented = src.defaultPrevented || + src.defaultPrevented === undefined && + + // Support: Android <=2.3 only + src.returnValue === false ? + returnTrue : + returnFalse; + + // Create target properties + // Support: Safari <=6 - 7 only + // Target should not be a text node (#504, #13143) + this.target = ( src.target && src.target.nodeType === 3 ) ? 
+ src.target.parentNode : + src.target; + + this.currentTarget = src.currentTarget; + this.relatedTarget = src.relatedTarget; + + // Event type + } else { + this.type = src; + } + + // Put explicitly provided properties onto the event object + if ( props ) { + jQuery.extend( this, props ); + } + + // Create a timestamp if incoming event doesn't have one + this.timeStamp = src && src.timeStamp || jQuery.now(); + + // Mark it as fixed + this[ jQuery.expando ] = true; +}; + +// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding +// https://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html +jQuery.Event.prototype = { + constructor: jQuery.Event, + isDefaultPrevented: returnFalse, + isPropagationStopped: returnFalse, + isImmediatePropagationStopped: returnFalse, + isSimulated: false, + + preventDefault: function() { + var e = this.originalEvent; + + this.isDefaultPrevented = returnTrue; + + if ( e && !this.isSimulated ) { + e.preventDefault(); + } + }, + stopPropagation: function() { + var e = this.originalEvent; + + this.isPropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopPropagation(); + } + }, + stopImmediatePropagation: function() { + var e = this.originalEvent; + + this.isImmediatePropagationStopped = returnTrue; + + if ( e && !this.isSimulated ) { + e.stopImmediatePropagation(); + } + + this.stopPropagation(); + } +}; + +// Includes all common event props including KeyEvent and MouseEvent specific props +jQuery.each( { + altKey: true, + bubbles: true, + cancelable: true, + changedTouches: true, + ctrlKey: true, + detail: true, + eventPhase: true, + metaKey: true, + pageX: true, + pageY: true, + shiftKey: true, + view: true, + "char": true, + charCode: true, + key: true, + keyCode: true, + button: true, + buttons: true, + clientX: true, + clientY: true, + offsetX: true, + offsetY: true, + pointerId: true, + pointerType: true, + screenX: true, + screenY: true, + targetTouches: true, + toElement: true, + touches: true, + + which: function( event ) { + var button = event.button; + + // Add which for key events + if ( event.which == null && rkeyEvent.test( event.type ) ) { + return event.charCode != null ? event.charCode : event.keyCode; + } + + // Add which for click: 1 === left; 2 === middle; 3 === right + if ( !event.which && button !== undefined && rmouseEvent.test( event.type ) ) { + if ( button & 1 ) { + return 1; + } + + if ( button & 2 ) { + return 3; + } + + if ( button & 4 ) { + return 2; + } + + return 0; + } + + return event.which; + } +}, jQuery.event.addProp ); + +// Create mouseenter/leave events using mouseover/out and event-time checks +// so that event delegation works in jQuery. +// Do the same for pointerenter/pointerleave and pointerover/pointerout +// +// Support: Safari 7 only +// Safari sends mouseenter too often; see: +// https://bugs.chromium.org/p/chromium/issues/detail?id=470258 +// for the description of the bug (it existed in older Chrome versions as well). +jQuery.each( { + mouseenter: "mouseover", + mouseleave: "mouseout", + pointerenter: "pointerover", + pointerleave: "pointerout" +}, function( orig, fix ) { + jQuery.event.special[ orig ] = { + delegateType: fix, + bindType: fix, + + handle: function( event ) { + var ret, + target = this, + related = event.relatedTarget, + handleObj = event.handleObj; + + // For mouseenter/leave call the handler if related is outside the target. 
+ // NB: No relatedTarget if the mouse left/entered the browser window + if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { + event.type = handleObj.origType; + ret = handleObj.handler.apply( this, arguments ); + event.type = fix; + } + return ret; + } + }; +} ); + +jQuery.fn.extend( { + + on: function( types, selector, data, fn ) { + return on( this, types, selector, data, fn ); + }, + one: function( types, selector, data, fn ) { + return on( this, types, selector, data, fn, 1 ); + }, + off: function( types, selector, fn ) { + var handleObj, type; + if ( types && types.preventDefault && types.handleObj ) { + + // ( event ) dispatched jQuery.Event + handleObj = types.handleObj; + jQuery( types.delegateTarget ).off( + handleObj.namespace ? + handleObj.origType + "." + handleObj.namespace : + handleObj.origType, + handleObj.selector, + handleObj.handler + ); + return this; + } + if ( typeof types === "object" ) { + + // ( types-object [, selector] ) + for ( type in types ) { + this.off( type, selector, types[ type ] ); + } + return this; + } + if ( selector === false || typeof selector === "function" ) { + + // ( types [, fn] ) + fn = selector; + selector = undefined; + } + if ( fn === false ) { + fn = returnFalse; + } + return this.each( function() { + jQuery.event.remove( this, types, fn, selector ); + } ); + } +} ); + + +var + + /* eslint-disable max-len */ + + // See https://github.com/eslint/eslint/issues/3229 + rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([a-z][^\/\0>\x20\t\r\n\f]*)[^>]*)\/>/gi, + + /* eslint-enable */ + + // Support: IE <=10 - 11, Edge 12 - 13 + // In IE/Edge using regex groups here causes severe slowdowns. + // See https://connect.microsoft.com/IE/feedback/details/1736512/ + rnoInnerhtml = /<script|<style|<link/i, + + // checked="checked" or checked + rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i, + rscriptTypeMasked = /^true\/(.*)/, + rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g; + +// Prefer a tbody over its parent table for containing new rows +function manipulationTarget( elem, content ) { + if ( nodeName( elem, "table" ) && + nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ) { + + return jQuery( ">tbody", elem )[ 0 ] || elem; + } + + return elem; +} + +// Replace/restore the type attribute of script elements for safe DOM manipulation +function disableScript( elem ) { + elem.type = ( elem.getAttribute( "type" ) !== null ) + "/" + elem.type; + return elem; +} +function restoreScript( elem ) { + var match = rscriptTypeMasked.exec( elem.type ); + + if ( match ) { + elem.type = match[ 1 ]; + } else { + elem.removeAttribute( "type" ); + } + + return elem; +} + +function cloneCopyEvent( src, dest ) { + var i, l, type, pdataOld, pdataCur, udataOld, udataCur, events; + + if ( dest.nodeType !== 1 ) { + return; + } + + // 1. Copy private data: events, handlers, etc. + if ( dataPriv.hasData( src ) ) { + pdataOld = dataPriv.access( src ); + pdataCur = dataPriv.set( dest, pdataOld ); + events = pdataOld.events; + + if ( events ) { + delete pdataCur.handle; + pdataCur.events = {}; + + for ( type in events ) { + for ( i = 0, l = events[ type ].length; i < l; i++ ) { + jQuery.event.add( dest, type, events[ type ][ i ] ); + } + } + } + } + + // 2. Copy user data + if ( dataUser.hasData( src ) ) { + udataOld = dataUser.access( src ); + udataCur = jQuery.extend( {}, udataOld ); + + dataUser.set( dest, udataCur ); + } +} + +// Fix IE bugs, see support tests +function fixInput( src, dest ) { + var nodeName = dest.nodeName.toLowerCase(); + + // Fails to persist the checked state of a cloned checkbox or radio button. 
+ if ( nodeName === "input" && rcheckableType.test( src.type ) ) { + dest.checked = src.checked; + + // Fails to return the selected option to the default selected state when cloning options + } else if ( nodeName === "input" || nodeName === "textarea" ) { + dest.defaultValue = src.defaultValue; + } +} + +function domManip( collection, args, callback, ignored ) { + + // Flatten any nested arrays + args = concat.apply( [], args ); + + var fragment, first, scripts, hasScripts, node, doc, + i = 0, + l = collection.length, + iNoClone = l - 1, + value = args[ 0 ], + isFunction = jQuery.isFunction( value ); + + // We can't cloneNode fragments that contain checked, in WebKit + if ( isFunction || + ( l > 1 && typeof value === "string" && + !support.checkClone && rchecked.test( value ) ) ) { + return collection.each( function( index ) { + var self = collection.eq( index ); + if ( isFunction ) { + args[ 0 ] = value.call( this, index, self.html() ); + } + domManip( self, args, callback, ignored ); + } ); + } + + if ( l ) { + fragment = buildFragment( args, collection[ 0 ].ownerDocument, false, collection, ignored ); + first = fragment.firstChild; + + if ( fragment.childNodes.length === 1 ) { + fragment = first; + } + + // Require either new content or an interest in ignored elements to invoke the callback + if ( first || ignored ) { + scripts = jQuery.map( getAll( fragment, "script" ), disableScript ); + hasScripts = scripts.length; + + // Use the original fragment for the last item + // instead of the first because it can end up + // being emptied incorrectly in certain situations (#8070). + for ( ; i < l; i++ ) { + node = fragment; + + if ( i !== iNoClone ) { + node = jQuery.clone( node, true, true ); + + // Keep references to cloned scripts for later restoration + if ( hasScripts ) { + + // Support: Android <=4.0 only, PhantomJS 1 only + // push.apply(_, arraylike) throws on ancient WebKit + jQuery.merge( scripts, getAll( node, "script" ) ); + } + } + + callback.call( collection[ i ], node, i ); + } + + if ( hasScripts ) { + doc = scripts[ scripts.length - 1 ].ownerDocument; + + // Reenable scripts + jQuery.map( scripts, restoreScript ); + + // Evaluate executable scripts on first document insertion + for ( i = 0; i < hasScripts; i++ ) { + node = scripts[ i ]; + if ( rscriptType.test( node.type || "" ) && + !dataPriv.access( node, "globalEval" ) && + jQuery.contains( doc, node ) ) { + + if ( node.src ) { + + // Optional AJAX dependency, but won't run scripts if not present + if ( jQuery._evalUrl ) { + jQuery._evalUrl( node.src ); + } + } else { + DOMEval( node.textContent.replace( rcleanScript, "" ), doc ); + } + } + } + } + } + } + + return collection; +} + +function remove( elem, selector, keepData ) { + var node, + nodes = selector ? 
jQuery.filter( selector, elem ) : elem, + i = 0; + + for ( ; ( node = nodes[ i ] ) != null; i++ ) { + if ( !keepData && node.nodeType === 1 ) { + jQuery.cleanData( getAll( node ) ); + } + + if ( node.parentNode ) { + if ( keepData && jQuery.contains( node.ownerDocument, node ) ) { + setGlobalEval( getAll( node, "script" ) ); + } + node.parentNode.removeChild( node ); + } + } + + return elem; +} + +jQuery.extend( { + htmlPrefilter: function( html ) { + return html.replace( rxhtmlTag, "<$1></$2>" ); + }, + + clone: function( elem, dataAndEvents, deepDataAndEvents ) { + var i, l, srcElements, destElements, + clone = elem.cloneNode( true ), + inPage = jQuery.contains( elem.ownerDocument, elem ); + + // Fix IE cloning issues + if ( !support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && + !jQuery.isXMLDoc( elem ) ) { + + // We eschew Sizzle here for performance reasons: https://jsperf.com/getall-vs-sizzle/2 + destElements = getAll( clone ); + srcElements = getAll( elem ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + fixInput( srcElements[ i ], destElements[ i ] ); + } + } + + // Copy the events from the original to the clone + if ( dataAndEvents ) { + if ( deepDataAndEvents ) { + srcElements = srcElements || getAll( elem ); + destElements = destElements || getAll( clone ); + + for ( i = 0, l = srcElements.length; i < l; i++ ) { + cloneCopyEvent( srcElements[ i ], destElements[ i ] ); + } + } else { + cloneCopyEvent( elem, clone ); + } + } + + // Preserve script evaluation history + destElements = getAll( clone, "script" ); + if ( destElements.length > 0 ) { + setGlobalEval( destElements, !inPage && getAll( elem, "script" ) ); + } + + // Return the cloned set + return clone; + }, + + cleanData: function( elems ) { + var data, elem, type, + special = jQuery.event.special, + i = 0; + + for ( ; ( elem = elems[ i ] ) !== undefined; i++ ) { + if ( acceptData( elem ) ) { + if ( ( data = elem[ dataPriv.expando ] ) ) { + if ( data.events ) { + for ( type in data.events ) { + if ( special[ type ] ) { + jQuery.event.remove( elem, type ); + + // This is a shortcut to avoid jQuery.event.remove's overhead + } else { + jQuery.removeEvent( elem, type, data.handle ); + } + } + } + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataPriv.expando ] = undefined; + } + if ( elem[ dataUser.expando ] ) { + + // Support: Chrome <=35 - 45+ + // Assign undefined instead of using delete, see Data#remove + elem[ dataUser.expando ] = undefined; + } + } + } + } +} ); + +jQuery.fn.extend( { + detach: function( selector ) { + return remove( this, selector, true ); + }, + + remove: function( selector ) { + return remove( this, selector ); + }, + + text: function( value ) { + return access( this, function( value ) { + return value === undefined ?
+ jQuery.text( this ) : + this.empty().each( function() { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + this.textContent = value; + } + } ); + }, null, value, arguments.length ); + }, + + append: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.appendChild( elem ); + } + } ); + }, + + prepend: function() { + return domManip( this, arguments, function( elem ) { + if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) { + var target = manipulationTarget( this, elem ); + target.insertBefore( elem, target.firstChild ); + } + } ); + }, + + before: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this ); + } + } ); + }, + + after: function() { + return domManip( this, arguments, function( elem ) { + if ( this.parentNode ) { + this.parentNode.insertBefore( elem, this.nextSibling ); + } + } ); + }, + + empty: function() { + var elem, + i = 0; + + for ( ; ( elem = this[ i ] ) != null; i++ ) { + if ( elem.nodeType === 1 ) { + + // Prevent memory leaks + jQuery.cleanData( getAll( elem, false ) ); + + // Remove any remaining nodes + elem.textContent = ""; + } + } + + return this; + }, + + clone: function( dataAndEvents, deepDataAndEvents ) { + dataAndEvents = dataAndEvents == null ? false : dataAndEvents; + deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents; + + return this.map( function() { + return jQuery.clone( this, dataAndEvents, deepDataAndEvents ); + } ); + }, + + html: function( value ) { + return access( this, function( value ) { + var elem = this[ 0 ] || {}, + i = 0, + l = this.length; + + if ( value === undefined && elem.nodeType === 1 ) { + return elem.innerHTML; + } + + // See if we can take a shortcut and just use innerHTML + if ( typeof value === "string" && !rnoInnerhtml.test( value ) && + !wrapMap[ ( rtagName.exec( value ) || [ "", "" ] )[ 1 ].toLowerCase() ] ) { + + value = jQuery.htmlPrefilter( value ); + + try { + for ( ; i < l; i++ ) { + elem = this[ i ] || {}; + + // Remove element nodes and prevent memory leaks + if ( elem.nodeType === 1 ) { + jQuery.cleanData( getAll( elem, false ) ); + elem.innerHTML = value; + } + } + + elem = 0; + + // If using innerHTML throws an exception, use the fallback method + } catch ( e ) {} + } + + if ( elem ) { + this.empty().append( value ); + } + }, null, value, arguments.length ); + }, + + replaceWith: function() { + var ignored = []; + + // Make the changes, replacing each non-ignored context element with the new content + return domManip( this, arguments, function( elem ) { + var parent = this.parentNode; + + if ( jQuery.inArray( this, ignored ) < 0 ) { + jQuery.cleanData( getAll( this ) ); + if ( parent ) { + parent.replaceChild( elem, this ); + } + } + + // Force callback invocation + }, ignored ); + } +} ); + +jQuery.each( { + appendTo: "append", + prependTo: "prepend", + insertBefore: "before", + insertAfter: "after", + replaceAll: "replaceWith" +}, function( name, original ) { + jQuery.fn[ name ] = function( selector ) { + var elems, + ret = [], + insert = jQuery( selector ), + last = insert.length - 1, + i = 0; + + for ( ; i <= last; i++ ) { + elems = i === last ? 
this : this.clone( true ); + jQuery( insert[ i ] )[ original ]( elems ); + + // Support: Android <=4.0 only, PhantomJS 1 only + // .get() because push.apply(_, arraylike) throws on ancient WebKit + push.apply( ret, elems.get() ); + } + + return this.pushStack( ret ); + }; +} ); +var rmargin = ( /^margin/ ); + +var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" ); + +var getStyles = function( elem ) { + + // Support: IE <=11 only, Firefox <=30 (#15098, #14150) + // IE throws on elements created in popups + // FF meanwhile throws on frame elements through "defaultView.getComputedStyle" + var view = elem.ownerDocument.defaultView; + + if ( !view || !view.opener ) { + view = window; + } + + return view.getComputedStyle( elem ); + }; + + + +( function() { + + // Executing both pixelPosition & boxSizingReliable tests require only one layout + // so they're executed at the same time to save the second computation. + function computeStyleTests() { + + // This is a singleton, we need to execute it only once + if ( !div ) { + return; + } + + div.style.cssText = + "box-sizing:border-box;" + + "position:relative;display:block;" + + "margin:auto;border:1px;padding:1px;" + + "top:1%;width:50%"; + div.innerHTML = ""; + documentElement.appendChild( container ); + + var divStyle = window.getComputedStyle( div ); + pixelPositionVal = divStyle.top !== "1%"; + + // Support: Android 4.0 - 4.3 only, Firefox <=3 - 44 + reliableMarginLeftVal = divStyle.marginLeft === "2px"; + boxSizingReliableVal = divStyle.width === "4px"; + + // Support: Android 4.0 - 4.3 only + // Some styles come back with percentage values, even though they shouldn't + div.style.marginRight = "50%"; + pixelMarginRightVal = divStyle.marginRight === "4px"; + + documentElement.removeChild( container ); + + // Nullify the div so it wouldn't be stored in the memory and + // it will also be a sign that checks already performed + div = null; + } + + var pixelPositionVal, boxSizingReliableVal, pixelMarginRightVal, reliableMarginLeftVal, + container = document.createElement( "div" ), + div = document.createElement( "div" ); + + // Finish early in limited (non-browser) environments + if ( !div.style ) { + return; + } + + // Support: IE <=9 - 11 only + // Style of cloned element affects source element cloned (#8908) + div.style.backgroundClip = "content-box"; + div.cloneNode( true ).style.backgroundClip = ""; + support.clearCloneStyle = div.style.backgroundClip === "content-box"; + + container.style.cssText = "border:0;width:8px;height:0;top:0;left:-9999px;" + + "padding:0;margin-top:1px;position:absolute"; + container.appendChild( div ); + + jQuery.extend( support, { + pixelPosition: function() { + computeStyleTests(); + return pixelPositionVal; + }, + boxSizingReliable: function() { + computeStyleTests(); + return boxSizingReliableVal; + }, + pixelMarginRight: function() { + computeStyleTests(); + return pixelMarginRightVal; + }, + reliableMarginLeft: function() { + computeStyleTests(); + return reliableMarginLeftVal; + } + } ); +} )(); + + +function curCSS( elem, name, computed ) { + var width, minWidth, maxWidth, ret, + + // Support: Firefox 51+ + // Retrieving style before computed somehow + // fixes an issue with getting wrong values + // on detached elements + style = elem.style; + + computed = computed || getStyles( elem ); + + // getPropertyValue is needed for: + // .css('filter') (IE 9 only, #12537) + // .css('--customProperty) (#3144) + if ( computed ) { + ret = computed.getPropertyValue( name ) || computed[ name ]; + + if ( 
ret === "" && !jQuery.contains( elem.ownerDocument, elem ) ) { + ret = jQuery.style( elem, name ); + } + + // A tribute to the "awesome hack by Dean Edwards" + // Android Browser returns percentage for some values, + // but width seems to be reliably pixels. + // This is against the CSSOM draft spec: + // https://drafts.csswg.org/cssom/#resolved-values + if ( !support.pixelMarginRight() && rnumnonpx.test( ret ) && rmargin.test( name ) ) { + + // Remember the original values + width = style.width; + minWidth = style.minWidth; + maxWidth = style.maxWidth; + + // Put in the new values to get a computed value out + style.minWidth = style.maxWidth = style.width = ret; + ret = computed.width; + + // Revert the changed values + style.width = width; + style.minWidth = minWidth; + style.maxWidth = maxWidth; + } + } + + return ret !== undefined ? + + // Support: IE <=9 - 11 only + // IE returns zIndex value as an integer. + ret + "" : + ret; +} + + +function addGetHookIf( conditionFn, hookFn ) { + + // Define the hook, we'll check on the first run if it's really needed. + return { + get: function() { + if ( conditionFn() ) { + + // Hook not needed (or it's not possible to use it due + // to missing dependency), remove it. + delete this.get; + return; + } + + // Hook needed; redefine it so that the support test is not executed again. + return ( this.get = hookFn ).apply( this, arguments ); + } + }; +} + + +var + + // Swappable if display is none or starts with table + // except "table", "table-cell", or "table-caption" + // See here for display values: https://developer.mozilla.org/en-US/docs/CSS/display + rdisplayswap = /^(none|table(?!-c[ea]).+)/, + rcustomProp = /^--/, + cssShow = { position: "absolute", visibility: "hidden", display: "block" }, + cssNormalTransform = { + letterSpacing: "0", + fontWeight: "400" + }, + + cssPrefixes = [ "Webkit", "Moz", "ms" ], + emptyStyle = document.createElement( "div" ).style; + +// Return a css property mapped to a potentially vendor prefixed property +function vendorPropName( name ) { + + // Shortcut for names that are not vendor prefixed + if ( name in emptyStyle ) { + return name; + } + + // Check for vendor prefixed names + var capName = name[ 0 ].toUpperCase() + name.slice( 1 ), + i = cssPrefixes.length; + + while ( i-- ) { + name = cssPrefixes[ i ] + capName; + if ( name in emptyStyle ) { + return name; + } + } +} + +// Return a property mapped along what jQuery.cssProps suggests or to +// a vendor prefixed property. +function finalPropName( name ) { + var ret = jQuery.cssProps[ name ]; + if ( !ret ) { + ret = jQuery.cssProps[ name ] = vendorPropName( name ) || name; + } + return ret; +} + +function setPositiveNumber( elem, value, subtract ) { + + // Any relative (+/-) values have already been + // normalized at this point + var matches = rcssNum.exec( value ); + return matches ? + + // Guard against undefined "subtract", e.g., when used as in cssHooks + Math.max( 0, matches[ 2 ] - ( subtract || 0 ) ) + ( matches[ 3 ] || "px" ) : + value; +} + +function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) { + var i, + val = 0; + + // If we already have the right measurement, avoid augmentation + if ( extra === ( isBorderBox ? "border" : "content" ) ) { + i = 4; + + // Otherwise initialize for horizontal or vertical properties + } else { + i = name === "width" ? 
1 : 0; + } + + for ( ; i < 4; i += 2 ) { + + // Both box models exclude margin, so add it if we want it + if ( extra === "margin" ) { + val += jQuery.css( elem, extra + cssExpand[ i ], true, styles ); + } + + if ( isBorderBox ) { + + // border-box includes padding, so remove it if we want content + if ( extra === "content" ) { + val -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + } + + // At this point, extra isn't border nor margin, so remove border + if ( extra !== "margin" ) { + val -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + } else { + + // At this point, extra isn't content, so add padding + val += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles ); + + // At this point, extra isn't content nor padding, so add border + if ( extra !== "padding" ) { + val += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles ); + } + } + } + + return val; +} + +function getWidthOrHeight( elem, name, extra ) { + + // Start with computed style + var valueIsBorderBox, + styles = getStyles( elem ), + val = curCSS( elem, name, styles ), + isBorderBox = jQuery.css( elem, "boxSizing", false, styles ) === "border-box"; + + // Computed unit is not pixels. Stop here and return. + if ( rnumnonpx.test( val ) ) { + return val; + } + + // Check for style in case a browser which returns unreliable values + // for getComputedStyle silently falls back to the reliable elem.style + valueIsBorderBox = isBorderBox && + ( support.boxSizingReliable() || val === elem.style[ name ] ); + + // Fall back to offsetWidth/Height when value is "auto" + // This happens for inline elements with no explicit setting (gh-3571) + if ( val === "auto" ) { + val = elem[ "offset" + name[ 0 ].toUpperCase() + name.slice( 1 ) ]; + } + + // Normalize "", auto, and prepare for extra + val = parseFloat( val ) || 0; + + // Use the active box-sizing model to add/subtract irrelevant styles + return ( val + + augmentWidthOrHeight( + elem, + name, + extra || ( isBorderBox ? "border" : "content" ), + valueIsBorderBox, + styles + ) + ) + "px"; +} + +jQuery.extend( { + + // Add in style property hooks for overriding the default + // behavior of getting and setting a style property + cssHooks: { + opacity: { + get: function( elem, computed ) { + if ( computed ) { + + // We should always get a number back from opacity + var ret = curCSS( elem, "opacity" ); + return ret === "" ? "1" : ret; + } + } + } + }, + + // Don't automatically add "px" to these possibly-unitless properties + cssNumber: { + "animationIterationCount": true, + "columnCount": true, + "fillOpacity": true, + "flexGrow": true, + "flexShrink": true, + "fontWeight": true, + "lineHeight": true, + "opacity": true, + "order": true, + "orphans": true, + "widows": true, + "zIndex": true, + "zoom": true + }, + + // Add in properties whose names you wish to fix before + // setting or getting the value + cssProps: { + "float": "cssFloat" + }, + + // Get and set the style property on a DOM Node + style: function( elem, name, value, extra ) { + + // Don't set styles on text and comment nodes + if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) { + return; + } + + // Make sure that we're working with the right name + var ret, type, hooks, + origName = jQuery.camelCase( name ), + isCustomProp = rcustomProp.test( name ), + style = elem.style; + + // Make sure that we're working with the right name. We don't + // want to query the value if it is a CSS custom property + // since they are user-defined. 
+ if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Gets hook for the prefixed version, then unprefixed version + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // Check if we're setting a value + if ( value !== undefined ) { + type = typeof value; + + // Convert "+=" or "-=" to relative numbers (#7345) + if ( type === "string" && ( ret = rcssNum.exec( value ) ) && ret[ 1 ] ) { + value = adjustCSS( elem, name, ret ); + + // Fixes bug #9237 + type = "number"; + } + + // Make sure that null and NaN values aren't set (#7116) + if ( value == null || value !== value ) { + return; + } + + // If a number was passed in, add the unit (except for certain CSS properties) + if ( type === "number" ) { + value += ret && ret[ 3 ] || ( jQuery.cssNumber[ origName ] ? "" : "px" ); + } + + // background-* props affect original clone's values + if ( !support.clearCloneStyle && value === "" && name.indexOf( "background" ) === 0 ) { + style[ name ] = "inherit"; + } + + // If a hook was provided, use that value, otherwise just set the specified value + if ( !hooks || !( "set" in hooks ) || + ( value = hooks.set( elem, value, extra ) ) !== undefined ) { + + if ( isCustomProp ) { + style.setProperty( name, value ); + } else { + style[ name ] = value; + } + } + + } else { + + // If a hook was provided get the non-computed value from there + if ( hooks && "get" in hooks && + ( ret = hooks.get( elem, false, extra ) ) !== undefined ) { + + return ret; + } + + // Otherwise just get the value from the style object + return style[ name ]; + } + }, + + css: function( elem, name, extra, styles ) { + var val, num, hooks, + origName = jQuery.camelCase( name ), + isCustomProp = rcustomProp.test( name ); + + // Make sure that we're working with the right name. We don't + // want to modify the value if it is a CSS custom property + // since they are user-defined. + if ( !isCustomProp ) { + name = finalPropName( origName ); + } + + // Try prefixed name followed by the unprefixed name + hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ]; + + // If a hook was provided get the computed value from there + if ( hooks && "get" in hooks ) { + val = hooks.get( elem, true, extra ); + } + + // Otherwise, if a way to get the computed value exists, use that + if ( val === undefined ) { + val = curCSS( elem, name, styles ); + } + + // Convert "normal" to computed value + if ( val === "normal" && name in cssNormalTransform ) { + val = cssNormalTransform[ name ]; + } + + // Make numeric if forced or a qualifier was provided and val looks numeric + if ( extra === "" || extra ) { + num = parseFloat( val ); + return extra === true || isFinite( num ) ? num || 0 : val; + } + + return val; + } +} ); + +jQuery.each( [ "height", "width" ], function( i, name ) { + jQuery.cssHooks[ name ] = { + get: function( elem, computed, extra ) { + if ( computed ) { + + // Certain elements can have dimension info if we invisibly show them + // but it must have a current display style that would benefit + return rdisplayswap.test( jQuery.css( elem, "display" ) ) && + + // Support: Safari 8+ + // Table columns in Safari have non-zero offsetWidth & zero + // getBoundingClientRect().width unless display is changed. + // Support: IE <=11 only + // Running getBoundingClientRect on a disconnected node + // in IE throws an error. + ( !elem.getClientRects().length || !elem.getBoundingClientRect().width ) ? 
+ swap( elem, cssShow, function() { + return getWidthOrHeight( elem, name, extra ); + } ) : + getWidthOrHeight( elem, name, extra ); + } + }, + + set: function( elem, value, extra ) { + var matches, + styles = extra && getStyles( elem ), + subtract = extra && augmentWidthOrHeight( + elem, + name, + extra, + jQuery.css( elem, "boxSizing", false, styles ) === "border-box", + styles + ); + + // Convert to pixels if value adjustment is needed + if ( subtract && ( matches = rcssNum.exec( value ) ) && + ( matches[ 3 ] || "px" ) !== "px" ) { + + elem.style[ name ] = value; + value = jQuery.css( elem, name ); + } + + return setPositiveNumber( elem, value, subtract ); + } + }; +} ); + +jQuery.cssHooks.marginLeft = addGetHookIf( support.reliableMarginLeft, + function( elem, computed ) { + if ( computed ) { + return ( parseFloat( curCSS( elem, "marginLeft" ) ) || + elem.getBoundingClientRect().left - + swap( elem, { marginLeft: 0 }, function() { + return elem.getBoundingClientRect().left; + } ) + ) + "px"; + } + } +); + +// These hooks are used by animate to expand properties +jQuery.each( { + margin: "", + padding: "", + border: "Width" +}, function( prefix, suffix ) { + jQuery.cssHooks[ prefix + suffix ] = { + expand: function( value ) { + var i = 0, + expanded = {}, + + // Assumes a single number if not a string + parts = typeof value === "string" ? value.split( " " ) : [ value ]; + + for ( ; i < 4; i++ ) { + expanded[ prefix + cssExpand[ i ] + suffix ] = + parts[ i ] || parts[ i - 2 ] || parts[ 0 ]; + } + + return expanded; + } + }; + + if ( !rmargin.test( prefix ) ) { + jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber; + } +} ); + +jQuery.fn.extend( { + css: function( name, value ) { + return access( this, function( elem, name, value ) { + var styles, len, + map = {}, + i = 0; + + if ( Array.isArray( name ) ) { + styles = getStyles( elem ); + len = name.length; + + for ( ; i < len; i++ ) { + map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles ); + } + + return map; + } + + return value !== undefined ? + jQuery.style( elem, name, value ) : + jQuery.css( elem, name ); + }, name, value, arguments.length > 1 ); + } +} ); + + +function Tween( elem, options, prop, end, easing ) { + return new Tween.prototype.init( elem, options, prop, end, easing ); +} +jQuery.Tween = Tween; + +Tween.prototype = { + constructor: Tween, + init: function( elem, options, prop, end, easing, unit ) { + this.elem = elem; + this.prop = prop; + this.easing = easing || jQuery.easing._default; + this.options = options; + this.start = this.now = this.cur(); + this.end = end; + this.unit = unit || ( jQuery.cssNumber[ prop ] ? "" : "px" ); + }, + cur: function() { + var hooks = Tween.propHooks[ this.prop ]; + + return hooks && hooks.get ? 
+ hooks.get( this ) : + Tween.propHooks._default.get( this ); + }, + run: function( percent ) { + var eased, + hooks = Tween.propHooks[ this.prop ]; + + if ( this.options.duration ) { + this.pos = eased = jQuery.easing[ this.easing ]( + percent, this.options.duration * percent, 0, 1, this.options.duration + ); + } else { + this.pos = eased = percent; + } + this.now = ( this.end - this.start ) * eased + this.start; + + if ( this.options.step ) { + this.options.step.call( this.elem, this.now, this ); + } + + if ( hooks && hooks.set ) { + hooks.set( this ); + } else { + Tween.propHooks._default.set( this ); + } + return this; + } +}; + +Tween.prototype.init.prototype = Tween.prototype; + +Tween.propHooks = { + _default: { + get: function( tween ) { + var result; + + // Use a property on the element directly when it is not a DOM element, + // or when there is no matching style property that exists. + if ( tween.elem.nodeType !== 1 || + tween.elem[ tween.prop ] != null && tween.elem.style[ tween.prop ] == null ) { + return tween.elem[ tween.prop ]; + } + + // Passing an empty string as a 3rd parameter to .css will automatically + // attempt a parseFloat and fallback to a string if the parse fails. + // Simple values such as "10px" are parsed to Float; + // complex values such as "rotate(1rad)" are returned as-is. + result = jQuery.css( tween.elem, tween.prop, "" ); + + // Empty strings, null, undefined and "auto" are converted to 0. + return !result || result === "auto" ? 0 : result; + }, + set: function( tween ) { + + // Use step hook for back compat. + // Use cssHook if its there. + // Use .style if available and use plain properties where available. + if ( jQuery.fx.step[ tween.prop ] ) { + jQuery.fx.step[ tween.prop ]( tween ); + } else if ( tween.elem.nodeType === 1 && + ( tween.elem.style[ jQuery.cssProps[ tween.prop ] ] != null || + jQuery.cssHooks[ tween.prop ] ) ) { + jQuery.style( tween.elem, tween.prop, tween.now + tween.unit ); + } else { + tween.elem[ tween.prop ] = tween.now; + } + } + } +}; + +// Support: IE <=9 only +// Panic based approach to setting things on disconnected nodes +Tween.propHooks.scrollTop = Tween.propHooks.scrollLeft = { + set: function( tween ) { + if ( tween.elem.nodeType && tween.elem.parentNode ) { + tween.elem[ tween.prop ] = tween.now; + } + } +}; + +jQuery.easing = { + linear: function( p ) { + return p; + }, + swing: function( p ) { + return 0.5 - Math.cos( p * Math.PI ) / 2; + }, + _default: "swing" +}; + +jQuery.fx = Tween.prototype.init; + +// Back compat <1.8 extension point +jQuery.fx.step = {}; + + + + +var + fxNow, inProgress, + rfxtypes = /^(?:toggle|show|hide)$/, + rrun = /queueHooks$/; + +function schedule() { + if ( inProgress ) { + if ( document.hidden === false && window.requestAnimationFrame ) { + window.requestAnimationFrame( schedule ); + } else { + window.setTimeout( schedule, jQuery.fx.interval ); + } + + jQuery.fx.tick(); + } +} + +// Animations created synchronously will run synchronously +function createFxNow() { + window.setTimeout( function() { + fxNow = undefined; + } ); + return ( fxNow = jQuery.now() ); +} + +// Generate parameters to create a standard animation +function genFx( type, includeWidth ) { + var which, + i = 0, + attrs = { height: type }; + + // If we include width, step value is 1 to do all cssExpand values, + // otherwise step value is 2 to skip over Left and Right + includeWidth = includeWidth ? 
1 : 0; + for ( ; i < 4; i += 2 - includeWidth ) { + which = cssExpand[ i ]; + attrs[ "margin" + which ] = attrs[ "padding" + which ] = type; + } + + if ( includeWidth ) { + attrs.opacity = attrs.width = type; + } + + return attrs; +} + +function createTween( value, prop, animation ) { + var tween, + collection = ( Animation.tweeners[ prop ] || [] ).concat( Animation.tweeners[ "*" ] ), + index = 0, + length = collection.length; + for ( ; index < length; index++ ) { + if ( ( tween = collection[ index ].call( animation, prop, value ) ) ) { + + // We're done with this property + return tween; + } + } +} + +function defaultPrefilter( elem, props, opts ) { + var prop, value, toggle, hooks, oldfire, propTween, restoreDisplay, display, + isBox = "width" in props || "height" in props, + anim = this, + orig = {}, + style = elem.style, + hidden = elem.nodeType && isHiddenWithinTree( elem ), + dataShow = dataPriv.get( elem, "fxshow" ); + + // Queue-skipping animations hijack the fx hooks + if ( !opts.queue ) { + hooks = jQuery._queueHooks( elem, "fx" ); + if ( hooks.unqueued == null ) { + hooks.unqueued = 0; + oldfire = hooks.empty.fire; + hooks.empty.fire = function() { + if ( !hooks.unqueued ) { + oldfire(); + } + }; + } + hooks.unqueued++; + + anim.always( function() { + + // Ensure the complete handler is called before this completes + anim.always( function() { + hooks.unqueued--; + if ( !jQuery.queue( elem, "fx" ).length ) { + hooks.empty.fire(); + } + } ); + } ); + } + + // Detect show/hide animations + for ( prop in props ) { + value = props[ prop ]; + if ( rfxtypes.test( value ) ) { + delete props[ prop ]; + toggle = toggle || value === "toggle"; + if ( value === ( hidden ? "hide" : "show" ) ) { + + // Pretend to be hidden if this is a "show" and + // there is still data from a stopped show/hide + if ( value === "show" && dataShow && dataShow[ prop ] !== undefined ) { + hidden = true; + + // Ignore all other no-op show/hide data + } else { + continue; + } + } + orig[ prop ] = dataShow && dataShow[ prop ] || jQuery.style( elem, prop ); + } + } + + // Bail out if this is a no-op like .hide().hide() + propTween = !jQuery.isEmptyObject( props ); + if ( !propTween && jQuery.isEmptyObject( orig ) ) { + return; + } + + // Restrict "overflow" and "display" styles during box animations + if ( isBox && elem.nodeType === 1 ) { + + // Support: IE <=9 - 11, Edge 12 - 13 + // Record all 3 overflow attributes because IE does not infer the shorthand + // from identically-valued overflowX and overflowY + opts.overflow = [ style.overflow, style.overflowX, style.overflowY ]; + + // Identify a display type, preferring old show/hide data over the CSS cascade + restoreDisplay = dataShow && dataShow.display; + if ( restoreDisplay == null ) { + restoreDisplay = dataPriv.get( elem, "display" ); + } + display = jQuery.css( elem, "display" ); + if ( display === "none" ) { + if ( restoreDisplay ) { + display = restoreDisplay; + } else { + + // Get nonempty value(s) by temporarily forcing visibility + showHide( [ elem ], true ); + restoreDisplay = elem.style.display || restoreDisplay; + display = jQuery.css( elem, "display" ); + showHide( [ elem ] ); + } + } + + // Animate inline elements as inline-block + if ( display === "inline" || display === "inline-block" && restoreDisplay != null ) { + if ( jQuery.css( elem, "float" ) === "none" ) { + + // Restore the original display value at the end of pure show/hide animations + if ( !propTween ) { + anim.done( function() { + style.display = restoreDisplay; + } ); + if ( 
restoreDisplay == null ) { + display = style.display; + restoreDisplay = display === "none" ? "" : display; + } + } + style.display = "inline-block"; + } + } + } + + if ( opts.overflow ) { + style.overflow = "hidden"; + anim.always( function() { + style.overflow = opts.overflow[ 0 ]; + style.overflowX = opts.overflow[ 1 ]; + style.overflowY = opts.overflow[ 2 ]; + } ); + } + + // Implement show/hide animations + propTween = false; + for ( prop in orig ) { + + // General show/hide setup for this element animation + if ( !propTween ) { + if ( dataShow ) { + if ( "hidden" in dataShow ) { + hidden = dataShow.hidden; + } + } else { + dataShow = dataPriv.access( elem, "fxshow", { display: restoreDisplay } ); + } + + // Store hidden/visible for toggle so `.stop().toggle()` "reverses" + if ( toggle ) { + dataShow.hidden = !hidden; + } + + // Show elements before animating them + if ( hidden ) { + showHide( [ elem ], true ); + } + + /* eslint-disable no-loop-func */ + + anim.done( function() { + + /* eslint-enable no-loop-func */ + + // The final step of a "hide" animation is actually hiding the element + if ( !hidden ) { + showHide( [ elem ] ); + } + dataPriv.remove( elem, "fxshow" ); + for ( prop in orig ) { + jQuery.style( elem, prop, orig[ prop ] ); + } + } ); + } + + // Per-property setup + propTween = createTween( hidden ? dataShow[ prop ] : 0, prop, anim ); + if ( !( prop in dataShow ) ) { + dataShow[ prop ] = propTween.start; + if ( hidden ) { + propTween.end = propTween.start; + propTween.start = 0; + } + } + } +} + +function propFilter( props, specialEasing ) { + var index, name, easing, value, hooks; + + // camelCase, specialEasing and expand cssHook pass + for ( index in props ) { + name = jQuery.camelCase( index ); + easing = specialEasing[ name ]; + value = props[ index ]; + if ( Array.isArray( value ) ) { + easing = value[ 1 ]; + value = props[ index ] = value[ 0 ]; + } + + if ( index !== name ) { + props[ name ] = value; + delete props[ index ]; + } + + hooks = jQuery.cssHooks[ name ]; + if ( hooks && "expand" in hooks ) { + value = hooks.expand( value ); + delete props[ name ]; + + // Not quite $.extend, this won't overwrite existing keys. 
+ // Reusing 'index' because we have the correct "name" + for ( index in value ) { + if ( !( index in props ) ) { + props[ index ] = value[ index ]; + specialEasing[ index ] = easing; + } + } + } else { + specialEasing[ name ] = easing; + } + } +} + +function Animation( elem, properties, options ) { + var result, + stopped, + index = 0, + length = Animation.prefilters.length, + deferred = jQuery.Deferred().always( function() { + + // Don't match elem in the :animated selector + delete tick.elem; + } ), + tick = function() { + if ( stopped ) { + return false; + } + var currentTime = fxNow || createFxNow(), + remaining = Math.max( 0, animation.startTime + animation.duration - currentTime ), + + // Support: Android 2.3 only + // Archaic crash bug won't allow us to use `1 - ( 0.5 || 0 )` (#12497) + temp = remaining / animation.duration || 0, + percent = 1 - temp, + index = 0, + length = animation.tweens.length; + + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( percent ); + } + + deferred.notifyWith( elem, [ animation, percent, remaining ] ); + + // If there's more to do, yield + if ( percent < 1 && length ) { + return remaining; + } + + // If this was an empty animation, synthesize a final progress notification + if ( !length ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + } + + // Resolve the animation and report its conclusion + deferred.resolveWith( elem, [ animation ] ); + return false; + }, + animation = deferred.promise( { + elem: elem, + props: jQuery.extend( {}, properties ), + opts: jQuery.extend( true, { + specialEasing: {}, + easing: jQuery.easing._default + }, options ), + originalProperties: properties, + originalOptions: options, + startTime: fxNow || createFxNow(), + duration: options.duration, + tweens: [], + createTween: function( prop, end ) { + var tween = jQuery.Tween( elem, animation.opts, prop, end, + animation.opts.specialEasing[ prop ] || animation.opts.easing ); + animation.tweens.push( tween ); + return tween; + }, + stop: function( gotoEnd ) { + var index = 0, + + // If we are going to the end, we want to run all the tweens + // otherwise we skip this part + length = gotoEnd ? 
animation.tweens.length : 0; + if ( stopped ) { + return this; + } + stopped = true; + for ( ; index < length; index++ ) { + animation.tweens[ index ].run( 1 ); + } + + // Resolve when we played the last frame; otherwise, reject + if ( gotoEnd ) { + deferred.notifyWith( elem, [ animation, 1, 0 ] ); + deferred.resolveWith( elem, [ animation, gotoEnd ] ); + } else { + deferred.rejectWith( elem, [ animation, gotoEnd ] ); + } + return this; + } + } ), + props = animation.props; + + propFilter( props, animation.opts.specialEasing ); + + for ( ; index < length; index++ ) { + result = Animation.prefilters[ index ].call( animation, elem, props, animation.opts ); + if ( result ) { + if ( jQuery.isFunction( result.stop ) ) { + jQuery._queueHooks( animation.elem, animation.opts.queue ).stop = + jQuery.proxy( result.stop, result ); + } + return result; + } + } + + jQuery.map( props, createTween, animation ); + + if ( jQuery.isFunction( animation.opts.start ) ) { + animation.opts.start.call( elem, animation ); + } + + // Attach callbacks from options + animation + .progress( animation.opts.progress ) + .done( animation.opts.done, animation.opts.complete ) + .fail( animation.opts.fail ) + .always( animation.opts.always ); + + jQuery.fx.timer( + jQuery.extend( tick, { + elem: elem, + anim: animation, + queue: animation.opts.queue + } ) + ); + + return animation; +} + +jQuery.Animation = jQuery.extend( Animation, { + + tweeners: { + "*": [ function( prop, value ) { + var tween = this.createTween( prop, value ); + adjustCSS( tween.elem, prop, rcssNum.exec( value ), tween ); + return tween; + } ] + }, + + tweener: function( props, callback ) { + if ( jQuery.isFunction( props ) ) { + callback = props; + props = [ "*" ]; + } else { + props = props.match( rnothtmlwhite ); + } + + var prop, + index = 0, + length = props.length; + + for ( ; index < length; index++ ) { + prop = props[ index ]; + Animation.tweeners[ prop ] = Animation.tweeners[ prop ] || []; + Animation.tweeners[ prop ].unshift( callback ); + } + }, + + prefilters: [ defaultPrefilter ], + + prefilter: function( callback, prepend ) { + if ( prepend ) { + Animation.prefilters.unshift( callback ); + } else { + Animation.prefilters.push( callback ); + } + } +} ); + +jQuery.speed = function( speed, easing, fn ) { + var opt = speed && typeof speed === "object" ? 
jQuery.extend( {}, speed ) : { + complete: fn || !fn && easing || + jQuery.isFunction( speed ) && speed, + duration: speed, + easing: fn && easing || easing && !jQuery.isFunction( easing ) && easing + }; + + // Go to the end state if fx are off + if ( jQuery.fx.off ) { + opt.duration = 0; + + } else { + if ( typeof opt.duration !== "number" ) { + if ( opt.duration in jQuery.fx.speeds ) { + opt.duration = jQuery.fx.speeds[ opt.duration ]; + + } else { + opt.duration = jQuery.fx.speeds._default; + } + } + } + + // Normalize opt.queue - true/undefined/null -> "fx" + if ( opt.queue == null || opt.queue === true ) { + opt.queue = "fx"; + } + + // Queueing + opt.old = opt.complete; + + opt.complete = function() { + if ( jQuery.isFunction( opt.old ) ) { + opt.old.call( this ); + } + + if ( opt.queue ) { + jQuery.dequeue( this, opt.queue ); + } + }; + + return opt; +}; + +jQuery.fn.extend( { + fadeTo: function( speed, to, easing, callback ) { + + // Show any hidden elements after setting opacity to 0 + return this.filter( isHiddenWithinTree ).css( "opacity", 0 ).show() + + // Animate to the value specified + .end().animate( { opacity: to }, speed, easing, callback ); + }, + animate: function( prop, speed, easing, callback ) { + var empty = jQuery.isEmptyObject( prop ), + optall = jQuery.speed( speed, easing, callback ), + doAnimation = function() { + + // Operate on a copy of prop so per-property easing won't be lost + var anim = Animation( this, jQuery.extend( {}, prop ), optall ); + + // Empty animations, or finishing resolves immediately + if ( empty || dataPriv.get( this, "finish" ) ) { + anim.stop( true ); + } + }; + doAnimation.finish = doAnimation; + + return empty || optall.queue === false ? + this.each( doAnimation ) : + this.queue( optall.queue, doAnimation ); + }, + stop: function( type, clearQueue, gotoEnd ) { + var stopQueue = function( hooks ) { + var stop = hooks.stop; + delete hooks.stop; + stop( gotoEnd ); + }; + + if ( typeof type !== "string" ) { + gotoEnd = clearQueue; + clearQueue = type; + type = undefined; + } + if ( clearQueue && type !== false ) { + this.queue( type || "fx", [] ); + } + + return this.each( function() { + var dequeue = true, + index = type != null && type + "queueHooks", + timers = jQuery.timers, + data = dataPriv.get( this ); + + if ( index ) { + if ( data[ index ] && data[ index ].stop ) { + stopQueue( data[ index ] ); + } + } else { + for ( index in data ) { + if ( data[ index ] && data[ index ].stop && rrun.test( index ) ) { + stopQueue( data[ index ] ); + } + } + } + + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && + ( type == null || timers[ index ].queue === type ) ) { + + timers[ index ].anim.stop( gotoEnd ); + dequeue = false; + timers.splice( index, 1 ); + } + } + + // Start the next in the queue if the last step wasn't forced. + // Timers currently will call their complete callbacks, which + // will dequeue but only if they were gotoEnd. + if ( dequeue || !gotoEnd ) { + jQuery.dequeue( this, type ); + } + } ); + }, + finish: function( type ) { + if ( type !== false ) { + type = type || "fx"; + } + return this.each( function() { + var index, + data = dataPriv.get( this ), + queue = data[ type + "queue" ], + hooks = data[ type + "queueHooks" ], + timers = jQuery.timers, + length = queue ? 
queue.length : 0; + + // Enable finishing flag on private data + data.finish = true; + + // Empty the queue first + jQuery.queue( this, type, [] ); + + if ( hooks && hooks.stop ) { + hooks.stop.call( this, true ); + } + + // Look for any active animations, and finish them + for ( index = timers.length; index--; ) { + if ( timers[ index ].elem === this && timers[ index ].queue === type ) { + timers[ index ].anim.stop( true ); + timers.splice( index, 1 ); + } + } + + // Look for any animations in the old queue and finish them + for ( index = 0; index < length; index++ ) { + if ( queue[ index ] && queue[ index ].finish ) { + queue[ index ].finish.call( this ); + } + } + + // Turn off finishing flag + delete data.finish; + } ); + } +} ); + +jQuery.each( [ "toggle", "show", "hide" ], function( i, name ) { + var cssFn = jQuery.fn[ name ]; + jQuery.fn[ name ] = function( speed, easing, callback ) { + return speed == null || typeof speed === "boolean" ? + cssFn.apply( this, arguments ) : + this.animate( genFx( name, true ), speed, easing, callback ); + }; +} ); + +// Generate shortcuts for custom animations +jQuery.each( { + slideDown: genFx( "show" ), + slideUp: genFx( "hide" ), + slideToggle: genFx( "toggle" ), + fadeIn: { opacity: "show" }, + fadeOut: { opacity: "hide" }, + fadeToggle: { opacity: "toggle" } +}, function( name, props ) { + jQuery.fn[ name ] = function( speed, easing, callback ) { + return this.animate( props, speed, easing, callback ); + }; +} ); + +jQuery.timers = []; +jQuery.fx.tick = function() { + var timer, + i = 0, + timers = jQuery.timers; + + fxNow = jQuery.now(); + + for ( ; i < timers.length; i++ ) { + timer = timers[ i ]; + + // Run the timer and safely remove it when done (allowing for external removal) + if ( !timer() && timers[ i ] === timer ) { + timers.splice( i--, 1 ); + } + } + + if ( !timers.length ) { + jQuery.fx.stop(); + } + fxNow = undefined; +}; + +jQuery.fx.timer = function( timer ) { + jQuery.timers.push( timer ); + jQuery.fx.start(); +}; + +jQuery.fx.interval = 13; +jQuery.fx.start = function() { + if ( inProgress ) { + return; + } + + inProgress = true; + schedule(); +}; + +jQuery.fx.stop = function() { + inProgress = null; +}; + +jQuery.fx.speeds = { + slow: 600, + fast: 200, + + // Default speed + _default: 400 +}; + + +// Based off of the plugin by Clint Helfers, with permission. +// https://web.archive.org/web/20100324014747/http://blindsignals.com/index.php/2009/07/jquery-delay/ +jQuery.fn.delay = function( time, type ) { + time = jQuery.fx ? 
jQuery.fx.speeds[ time ] || time : time; + type = type || "fx"; + + return this.queue( type, function( next, hooks ) { + var timeout = window.setTimeout( next, time ); + hooks.stop = function() { + window.clearTimeout( timeout ); + }; + } ); +}; + + +( function() { + var input = document.createElement( "input" ), + select = document.createElement( "select" ), + opt = select.appendChild( document.createElement( "option" ) ); + + input.type = "checkbox"; + + // Support: Android <=4.3 only + // Default value for a checkbox should be "on" + support.checkOn = input.value !== ""; + + // Support: IE <=11 only + // Must access selectedIndex to make default options select + support.optSelected = opt.selected; + + // Support: IE <=11 only + // An input loses its value after becoming a radio + input = document.createElement( "input" ); + input.value = "t"; + input.type = "radio"; + support.radioValue = input.value === "t"; +} )(); + + +var boolHook, + attrHandle = jQuery.expr.attrHandle; + +jQuery.fn.extend( { + attr: function( name, value ) { + return access( this, jQuery.attr, name, value, arguments.length > 1 ); + }, + + removeAttr: function( name ) { + return this.each( function() { + jQuery.removeAttr( this, name ); + } ); + } +} ); + +jQuery.extend( { + attr: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set attributes on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + // Fallback to prop when attributes are not supported + if ( typeof elem.getAttribute === "undefined" ) { + return jQuery.prop( elem, name, value ); + } + + // Attribute hooks are determined by the lowercase version + // Grab necessary hook if one is defined + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + hooks = jQuery.attrHooks[ name.toLowerCase() ] || + ( jQuery.expr.match.bool.test( name ) ? boolHook : undefined ); + } + + if ( value !== undefined ) { + if ( value === null ) { + jQuery.removeAttr( elem, name ); + return; + } + + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + elem.setAttribute( name, value + "" ); + return value; + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + ret = jQuery.find.attr( elem, name ); + + // Non-existent attributes return null, we normalize to undefined + return ret == null ? 
undefined : ret; + }, + + attrHooks: { + type: { + set: function( elem, value ) { + if ( !support.radioValue && value === "radio" && + nodeName( elem, "input" ) ) { + var val = elem.value; + elem.setAttribute( "type", value ); + if ( val ) { + elem.value = val; + } + return value; + } + } + } + }, + + removeAttr: function( elem, value ) { + var name, + i = 0, + + // Attribute names can contain non-HTML whitespace characters + // https://html.spec.whatwg.org/multipage/syntax.html#attributes-2 + attrNames = value && value.match( rnothtmlwhite ); + + if ( attrNames && elem.nodeType === 1 ) { + while ( ( name = attrNames[ i++ ] ) ) { + elem.removeAttribute( name ); + } + } + } +} ); + +// Hooks for boolean attributes +boolHook = { + set: function( elem, value, name ) { + if ( value === false ) { + + // Remove boolean attributes when set to false + jQuery.removeAttr( elem, name ); + } else { + elem.setAttribute( name, name ); + } + return name; + } +}; + +jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) { + var getter = attrHandle[ name ] || jQuery.find.attr; + + attrHandle[ name ] = function( elem, name, isXML ) { + var ret, handle, + lowercaseName = name.toLowerCase(); + + if ( !isXML ) { + + // Avoid an infinite loop by temporarily removing this function from the getter + handle = attrHandle[ lowercaseName ]; + attrHandle[ lowercaseName ] = ret; + ret = getter( elem, name, isXML ) != null ? + lowercaseName : + null; + attrHandle[ lowercaseName ] = handle; + } + return ret; + }; +} ); + + + + +var rfocusable = /^(?:input|select|textarea|button)$/i, + rclickable = /^(?:a|area)$/i; + +jQuery.fn.extend( { + prop: function( name, value ) { + return access( this, jQuery.prop, name, value, arguments.length > 1 ); + }, + + removeProp: function( name ) { + return this.each( function() { + delete this[ jQuery.propFix[ name ] || name ]; + } ); + } +} ); + +jQuery.extend( { + prop: function( elem, name, value ) { + var ret, hooks, + nType = elem.nodeType; + + // Don't get/set properties on text, comment and attribute nodes + if ( nType === 3 || nType === 8 || nType === 2 ) { + return; + } + + if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) { + + // Fix name and attach hooks + name = jQuery.propFix[ name ] || name; + hooks = jQuery.propHooks[ name ]; + } + + if ( value !== undefined ) { + if ( hooks && "set" in hooks && + ( ret = hooks.set( elem, value, name ) ) !== undefined ) { + return ret; + } + + return ( elem[ name ] = value ); + } + + if ( hooks && "get" in hooks && ( ret = hooks.get( elem, name ) ) !== null ) { + return ret; + } + + return elem[ name ]; + }, + + propHooks: { + tabIndex: { + get: function( elem ) { + + // Support: IE <=9 - 11 only + // elem.tabIndex doesn't always return the + // correct value when it hasn't been explicitly set + // https://web.archive.org/web/20141116233347/http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/ + // Use proper attribute retrieval(#12072) + var tabindex = jQuery.find.attr( elem, "tabindex" ); + + if ( tabindex ) { + return parseInt( tabindex, 10 ); + } + + if ( + rfocusable.test( elem.nodeName ) || + rclickable.test( elem.nodeName ) && + elem.href + ) { + return 0; + } + + return -1; + } + } + }, + + propFix: { + "for": "htmlFor", + "class": "className" + } +} ); + +// Support: IE <=11 only +// Accessing the selectedIndex property +// forces the browser to respect setting selected +// on the option +// The getter ensures a default option is selected +// when in an 
optgroup +// eslint rule "no-unused-expressions" is disabled for this code +// since it considers such accessions noop +if ( !support.optSelected ) { + jQuery.propHooks.selected = { + get: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent && parent.parentNode ) { + parent.parentNode.selectedIndex; + } + return null; + }, + set: function( elem ) { + + /* eslint no-unused-expressions: "off" */ + + var parent = elem.parentNode; + if ( parent ) { + parent.selectedIndex; + + if ( parent.parentNode ) { + parent.parentNode.selectedIndex; + } + } + } + }; +} + +jQuery.each( [ + "tabIndex", + "readOnly", + "maxLength", + "cellSpacing", + "cellPadding", + "rowSpan", + "colSpan", + "useMap", + "frameBorder", + "contentEditable" +], function() { + jQuery.propFix[ this.toLowerCase() ] = this; +} ); + + + + + // Strip and collapse whitespace according to HTML spec + // https://html.spec.whatwg.org/multipage/infrastructure.html#strip-and-collapse-whitespace + function stripAndCollapse( value ) { + var tokens = value.match( rnothtmlwhite ) || []; + return tokens.join( " " ); + } + + +function getClass( elem ) { + return elem.getAttribute && elem.getAttribute( "class" ) || ""; +} + +jQuery.fn.extend( { + addClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( jQuery.isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).addClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + if ( typeof value === "string" && value ) { + classes = value.match( rnothtmlwhite ) || []; + + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + if ( cur.indexOf( " " + clazz + " " ) < 0 ) { + cur += clazz + " "; + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + removeClass: function( value ) { + var classes, elem, cur, curValue, clazz, j, finalValue, + i = 0; + + if ( jQuery.isFunction( value ) ) { + return this.each( function( j ) { + jQuery( this ).removeClass( value.call( this, j, getClass( this ) ) ); + } ); + } + + if ( !arguments.length ) { + return this.attr( "class", "" ); + } + + if ( typeof value === "string" && value ) { + classes = value.match( rnothtmlwhite ) || []; + + while ( ( elem = this[ i++ ] ) ) { + curValue = getClass( elem ); + + // This expression is here for better compressibility (see addClass) + cur = elem.nodeType === 1 && ( " " + stripAndCollapse( curValue ) + " " ); + + if ( cur ) { + j = 0; + while ( ( clazz = classes[ j++ ] ) ) { + + // Remove *all* instances + while ( cur.indexOf( " " + clazz + " " ) > -1 ) { + cur = cur.replace( " " + clazz + " ", " " ); + } + } + + // Only assign if different to avoid unneeded rendering. + finalValue = stripAndCollapse( cur ); + if ( curValue !== finalValue ) { + elem.setAttribute( "class", finalValue ); + } + } + } + } + + return this; + }, + + toggleClass: function( value, stateVal ) { + var type = typeof value; + + if ( typeof stateVal === "boolean" && type === "string" ) { + return stateVal ? 
this.addClass( value ) : this.removeClass( value ); + } + + if ( jQuery.isFunction( value ) ) { + return this.each( function( i ) { + jQuery( this ).toggleClass( + value.call( this, i, getClass( this ), stateVal ), + stateVal + ); + } ); + } + + return this.each( function() { + var className, i, self, classNames; + + if ( type === "string" ) { + + // Toggle individual class names + i = 0; + self = jQuery( this ); + classNames = value.match( rnothtmlwhite ) || []; + + while ( ( className = classNames[ i++ ] ) ) { + + // Check each className given, space separated list + if ( self.hasClass( className ) ) { + self.removeClass( className ); + } else { + self.addClass( className ); + } + } + + // Toggle whole class name + } else if ( value === undefined || type === "boolean" ) { + className = getClass( this ); + if ( className ) { + + // Store className if set + dataPriv.set( this, "__className__", className ); + } + + // If the element has a class name or if we're passed `false`, + // then remove the whole classname (if there was one, the above saved it). + // Otherwise bring back whatever was previously saved (if anything), + // falling back to the empty string if nothing was stored. + if ( this.setAttribute ) { + this.setAttribute( "class", + className || value === false ? + "" : + dataPriv.get( this, "__className__" ) || "" + ); + } + } + } ); + }, + + hasClass: function( selector ) { + var className, elem, + i = 0; + + className = " " + selector + " "; + while ( ( elem = this[ i++ ] ) ) { + if ( elem.nodeType === 1 && + ( " " + stripAndCollapse( getClass( elem ) ) + " " ).indexOf( className ) > -1 ) { + return true; + } + } + + return false; + } +} ); + + + + +var rreturn = /\r/g; + +jQuery.fn.extend( { + val: function( value ) { + var hooks, ret, isFunction, + elem = this[ 0 ]; + + if ( !arguments.length ) { + if ( elem ) { + hooks = jQuery.valHooks[ elem.type ] || + jQuery.valHooks[ elem.nodeName.toLowerCase() ]; + + if ( hooks && + "get" in hooks && + ( ret = hooks.get( elem, "value" ) ) !== undefined + ) { + return ret; + } + + ret = elem.value; + + // Handle most common string cases + if ( typeof ret === "string" ) { + return ret.replace( rreturn, "" ); + } + + // Handle cases where value is null/undef or number + return ret == null ? "" : ret; + } + + return; + } + + isFunction = jQuery.isFunction( value ); + + return this.each( function( i ) { + var val; + + if ( this.nodeType !== 1 ) { + return; + } + + if ( isFunction ) { + val = value.call( this, i, jQuery( this ).val() ); + } else { + val = value; + } + + // Treat null/undefined as ""; convert numbers to string + if ( val == null ) { + val = ""; + + } else if ( typeof val === "number" ) { + val += ""; + + } else if ( Array.isArray( val ) ) { + val = jQuery.map( val, function( value ) { + return value == null ? "" : value + ""; + } ); + } + + hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ]; + + // If set returns undefined, fall back to normal setting + if ( !hooks || !( "set" in hooks ) || hooks.set( this, val, "value" ) === undefined ) { + this.value = val; + } + } ); + } +} ); + +jQuery.extend( { + valHooks: { + option: { + get: function( elem ) { + + var val = jQuery.find.attr( elem, "value" ); + return val != null ? 
+ val : + + // Support: IE <=10 - 11 only + // option.text throws exceptions (#14686, #14858) + // Strip and collapse whitespace + // https://html.spec.whatwg.org/#strip-and-collapse-whitespace + stripAndCollapse( jQuery.text( elem ) ); + } + }, + select: { + get: function( elem ) { + var value, option, i, + options = elem.options, + index = elem.selectedIndex, + one = elem.type === "select-one", + values = one ? null : [], + max = one ? index + 1 : options.length; + + if ( index < 0 ) { + i = max; + + } else { + i = one ? index : 0; + } + + // Loop through all the selected options + for ( ; i < max; i++ ) { + option = options[ i ]; + + // Support: IE <=9 only + // IE8-9 doesn't update selected after form reset (#2551) + if ( ( option.selected || i === index ) && + + // Don't return options that are disabled or in a disabled optgroup + !option.disabled && + ( !option.parentNode.disabled || + !nodeName( option.parentNode, "optgroup" ) ) ) { + + // Get the specific value for the option + value = jQuery( option ).val(); + + // We don't need an array for one selects + if ( one ) { + return value; + } + + // Multi-Selects return an array + values.push( value ); + } + } + + return values; + }, + + set: function( elem, value ) { + var optionSet, option, + options = elem.options, + values = jQuery.makeArray( value ), + i = options.length; + + while ( i-- ) { + option = options[ i ]; + + /* eslint-disable no-cond-assign */ + + if ( option.selected = + jQuery.inArray( jQuery.valHooks.option.get( option ), values ) > -1 + ) { + optionSet = true; + } + + /* eslint-enable no-cond-assign */ + } + + // Force browsers to behave consistently when non-matching value is set + if ( !optionSet ) { + elem.selectedIndex = -1; + } + return values; + } + } + } +} ); + +// Radios and checkboxes getter/setter +jQuery.each( [ "radio", "checkbox" ], function() { + jQuery.valHooks[ this ] = { + set: function( elem, value ) { + if ( Array.isArray( value ) ) { + return ( elem.checked = jQuery.inArray( jQuery( elem ).val(), value ) > -1 ); + } + } + }; + if ( !support.checkOn ) { + jQuery.valHooks[ this ].get = function( elem ) { + return elem.getAttribute( "value" ) === null ? "on" : elem.value; + }; + } +} ); + + + + +// Return jQuery for attributes-only inclusion + + +var rfocusMorph = /^(?:focusinfocus|focusoutblur)$/; + +jQuery.extend( jQuery.event, { + + trigger: function( event, data, elem, onlyHandlers ) { + + var i, cur, tmp, bubbleType, ontype, handle, special, + eventPath = [ elem || document ], + type = hasOwn.call( event, "type" ) ? event.type : event, + namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split( "." ) : []; + + cur = tmp = elem = elem || document; + + // Don't do events on text and comment nodes + if ( elem.nodeType === 3 || elem.nodeType === 8 ) { + return; + } + + // focus/blur morphs to focusin/out; ensure we're not firing them right now + if ( rfocusMorph.test( type + jQuery.event.triggered ) ) { + return; + } + + if ( type.indexOf( "." ) > -1 ) { + + // Namespaced trigger; create a regexp to match event type in handle() + namespaces = type.split( "." ); + type = namespaces.shift(); + namespaces.sort(); + } + ontype = type.indexOf( ":" ) < 0 && "on" + type; + + // Caller can pass in a jQuery.Event object, Object, or just an event type string + event = event[ jQuery.expando ] ? + event : + new jQuery.Event( type, typeof event === "object" && event ); + + // Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true) + event.isTrigger = onlyHandlers ? 
2 : 3; + event.namespace = namespaces.join( "." ); + event.rnamespace = event.namespace ? + new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ) : + null; + + // Clean up the event in case it is being reused + event.result = undefined; + if ( !event.target ) { + event.target = elem; + } + + // Clone any incoming data and prepend the event, creating the handler arg list + data = data == null ? + [ event ] : + jQuery.makeArray( data, [ event ] ); + + // Allow special events to draw outside the lines + special = jQuery.event.special[ type ] || {}; + if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) { + return; + } + + // Determine event propagation path in advance, per W3C events spec (#9951) + // Bubble up to document, then to window; watch for a global ownerDocument var (#9724) + if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) { + + bubbleType = special.delegateType || type; + if ( !rfocusMorph.test( bubbleType + type ) ) { + cur = cur.parentNode; + } + for ( ; cur; cur = cur.parentNode ) { + eventPath.push( cur ); + tmp = cur; + } + + // Only add window if we got to document (e.g., not plain obj or detached DOM) + if ( tmp === ( elem.ownerDocument || document ) ) { + eventPath.push( tmp.defaultView || tmp.parentWindow || window ); + } + } + + // Fire handlers on the event path + i = 0; + while ( ( cur = eventPath[ i++ ] ) && !event.isPropagationStopped() ) { + + event.type = i > 1 ? + bubbleType : + special.bindType || type; + + // jQuery handler + handle = ( dataPriv.get( cur, "events" ) || {} )[ event.type ] && + dataPriv.get( cur, "handle" ); + if ( handle ) { + handle.apply( cur, data ); + } + + // Native handler + handle = ontype && cur[ ontype ]; + if ( handle && handle.apply && acceptData( cur ) ) { + event.result = handle.apply( cur, data ); + if ( event.result === false ) { + event.preventDefault(); + } + } + } + event.type = type; + + // If nobody prevented the default action, do it now + if ( !onlyHandlers && !event.isDefaultPrevented() ) { + + if ( ( !special._default || + special._default.apply( eventPath.pop(), data ) === false ) && + acceptData( elem ) ) { + + // Call a native DOM method on the target with the same name as the event. 
+ // Don't do default actions on window, that's where global variables be (#6170) + if ( ontype && jQuery.isFunction( elem[ type ] ) && !jQuery.isWindow( elem ) ) { + + // Don't re-trigger an onFOO event when we call its FOO() method + tmp = elem[ ontype ]; + + if ( tmp ) { + elem[ ontype ] = null; + } + + // Prevent re-triggering of the same event, since we already bubbled it above + jQuery.event.triggered = type; + elem[ type ](); + jQuery.event.triggered = undefined; + + if ( tmp ) { + elem[ ontype ] = tmp; + } + } + } + } + + return event.result; + }, + + // Piggyback on a donor event to simulate a different one + // Used only for `focus(in | out)` events + simulate: function( type, elem, event ) { + var e = jQuery.extend( + new jQuery.Event(), + event, + { + type: type, + isSimulated: true + } + ); + + jQuery.event.trigger( e, null, elem ); + } + +} ); + +jQuery.fn.extend( { + + trigger: function( type, data ) { + return this.each( function() { + jQuery.event.trigger( type, data, this ); + } ); + }, + triggerHandler: function( type, data ) { + var elem = this[ 0 ]; + if ( elem ) { + return jQuery.event.trigger( type, data, elem, true ); + } + } +} ); + + +jQuery.each( ( "blur focus focusin focusout resize scroll click dblclick " + + "mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " + + "change select submit keydown keypress keyup contextmenu" ).split( " " ), + function( i, name ) { + + // Handle event binding + jQuery.fn[ name ] = function( data, fn ) { + return arguments.length > 0 ? + this.on( name, null, data, fn ) : + this.trigger( name ); + }; +} ); + +jQuery.fn.extend( { + hover: function( fnOver, fnOut ) { + return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver ); + } +} ); + + + + +support.focusin = "onfocusin" in window; + + +// Support: Firefox <=44 +// Firefox doesn't have focus(in | out) events +// Related ticket - https://bugzilla.mozilla.org/show_bug.cgi?id=687787 +// +// Support: Chrome <=48 - 49, Safari <=9.0 - 9.1 +// focus(in | out) events fire after focus & blur events, +// which is spec violation - http://www.w3.org/TR/DOM-Level-3-Events/#events-focusevent-event-order +// Related ticket - https://bugs.chromium.org/p/chromium/issues/detail?id=449857 +if ( !support.focusin ) { + jQuery.each( { focus: "focusin", blur: "focusout" }, function( orig, fix ) { + + // Attach a single capturing handler on the document while someone wants focusin/focusout + var handler = function( event ) { + jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ) ); + }; + + jQuery.event.special[ fix ] = { + setup: function() { + var doc = this.ownerDocument || this, + attaches = dataPriv.access( doc, fix ); + + if ( !attaches ) { + doc.addEventListener( orig, handler, true ); + } + dataPriv.access( doc, fix, ( attaches || 0 ) + 1 ); + }, + teardown: function() { + var doc = this.ownerDocument || this, + attaches = dataPriv.access( doc, fix ) - 1; + + if ( !attaches ) { + doc.removeEventListener( orig, handler, true ); + dataPriv.remove( doc, fix ); + + } else { + dataPriv.access( doc, fix, attaches ); + } + } + }; + } ); +} +var location = window.location; + +var nonce = jQuery.now(); + +var rquery = ( /\?/ ); + + + +// Cross-browser xml parsing +jQuery.parseXML = function( data ) { + var xml; + if ( !data || typeof data !== "string" ) { + return null; + } + + // Support: IE 9 - 11 only + // IE throws on parseFromString with invalid input. 
+ try { + xml = ( new window.DOMParser() ).parseFromString( data, "text/xml" ); + } catch ( e ) { + xml = undefined; + } + + if ( !xml || xml.getElementsByTagName( "parsererror" ).length ) { + jQuery.error( "Invalid XML: " + data ); + } + return xml; +}; + + +var + rbracket = /\[\]$/, + rCRLF = /\r?\n/g, + rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i, + rsubmittable = /^(?:input|select|textarea|keygen)/i; + +function buildParams( prefix, obj, traditional, add ) { + var name; + + if ( Array.isArray( obj ) ) { + + // Serialize array item. + jQuery.each( obj, function( i, v ) { + if ( traditional || rbracket.test( prefix ) ) { + + // Treat each array item as a scalar. + add( prefix, v ); + + } else { + + // Item is non-scalar (array or object), encode its numeric index. + buildParams( + prefix + "[" + ( typeof v === "object" && v != null ? i : "" ) + "]", + v, + traditional, + add + ); + } + } ); + + } else if ( !traditional && jQuery.type( obj ) === "object" ) { + + // Serialize object item. + for ( name in obj ) { + buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add ); + } + + } else { + + // Serialize scalar item. + add( prefix, obj ); + } +} + +// Serialize an array of form elements or a set of +// key/values into a query string +jQuery.param = function( a, traditional ) { + var prefix, + s = [], + add = function( key, valueOrFunction ) { + + // If value is a function, invoke it and use its return value + var value = jQuery.isFunction( valueOrFunction ) ? + valueOrFunction() : + valueOrFunction; + + s[ s.length ] = encodeURIComponent( key ) + "=" + + encodeURIComponent( value == null ? "" : value ); + }; + + // If an array was passed in, assume that it is an array of form elements. + if ( Array.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) { + + // Serialize the form elements + jQuery.each( a, function() { + add( this.name, this.value ); + } ); + + } else { + + // If traditional, encode the "old" way (the way 1.3.2 or older + // did it), otherwise encode params recursively. + for ( prefix in a ) { + buildParams( prefix, a[ prefix ], traditional, add ); + } + } + + // Return the resulting serialization + return s.join( "&" ); +}; + +jQuery.fn.extend( { + serialize: function() { + return jQuery.param( this.serializeArray() ); + }, + serializeArray: function() { + return this.map( function() { + + // Can add propHook for "elements" to filter or add form elements + var elements = jQuery.prop( this, "elements" ); + return elements ? 
jQuery.makeArray( elements ) : this; + } ) + .filter( function() { + var type = this.type; + + // Use .is( ":disabled" ) so that fieldset[disabled] works + return this.name && !jQuery( this ).is( ":disabled" ) && + rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) && + ( this.checked || !rcheckableType.test( type ) ); + } ) + .map( function( i, elem ) { + var val = jQuery( this ).val(); + + if ( val == null ) { + return null; + } + + if ( Array.isArray( val ) ) { + return jQuery.map( val, function( val ) { + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ); + } + + return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) }; + } ).get(); + } +} ); + + +var + r20 = /%20/g, + rhash = /#.*$/, + rantiCache = /([?&])_=[^&]*/, + rheaders = /^(.*?):[ \t]*([^\r\n]*)$/mg, + + // #7653, #8125, #8152: local protocol detection + rlocalProtocol = /^(?:about|app|app-storage|.+-extension|file|res|widget):$/, + rnoContent = /^(?:GET|HEAD)$/, + rprotocol = /^\/\//, + + /* Prefilters + * 1) They are useful to introduce custom dataTypes (see ajax/jsonp.js for an example) + * 2) These are called: + * - BEFORE asking for a transport + * - AFTER param serialization (s.data is a string if s.processData is true) + * 3) key is the dataType + * 4) the catchall symbol "*" can be used + * 5) execution will start with transport dataType and THEN continue down to "*" if needed + */ + prefilters = {}, + + /* Transports bindings + * 1) key is the dataType + * 2) the catchall symbol "*" can be used + * 3) selection will start with transport dataType and THEN go to "*" if needed + */ + transports = {}, + + // Avoid comment-prolog char sequence (#10098); must appease lint and evade compression + allTypes = "*/".concat( "*" ), + + // Anchor tag for parsing the document origin + originAnchor = document.createElement( "a" ); + originAnchor.href = location.href; + +// Base "constructor" for jQuery.ajaxPrefilter and jQuery.ajaxTransport +function addToPrefiltersOrTransports( structure ) { + + // dataTypeExpression is optional and defaults to "*" + return function( dataTypeExpression, func ) { + + if ( typeof dataTypeExpression !== "string" ) { + func = dataTypeExpression; + dataTypeExpression = "*"; + } + + var dataType, + i = 0, + dataTypes = dataTypeExpression.toLowerCase().match( rnothtmlwhite ) || []; + + if ( jQuery.isFunction( func ) ) { + + // For each dataType in the dataTypeExpression + while ( ( dataType = dataTypes[ i++ ] ) ) { + + // Prepend if requested + if ( dataType[ 0 ] === "+" ) { + dataType = dataType.slice( 1 ) || "*"; + ( structure[ dataType ] = structure[ dataType ] || [] ).unshift( func ); + + // Otherwise append + } else { + ( structure[ dataType ] = structure[ dataType ] || [] ).push( func ); + } + } + } + }; +} + +// Base inspection function for prefilters and transports +function inspectPrefiltersOrTransports( structure, options, originalOptions, jqXHR ) { + + var inspected = {}, + seekingTransport = ( structure === transports ); + + function inspect( dataType ) { + var selected; + inspected[ dataType ] = true; + jQuery.each( structure[ dataType ] || [], function( _, prefilterOrFactory ) { + var dataTypeOrTransport = prefilterOrFactory( options, originalOptions, jqXHR ); + if ( typeof dataTypeOrTransport === "string" && + !seekingTransport && !inspected[ dataTypeOrTransport ] ) { + + options.dataTypes.unshift( dataTypeOrTransport ); + inspect( dataTypeOrTransport ); + return false; + } else if ( seekingTransport ) { + return !( selected = dataTypeOrTransport 
); + } + } ); + return selected; + } + + return inspect( options.dataTypes[ 0 ] ) || !inspected[ "*" ] && inspect( "*" ); +} + +// A special extend for ajax options +// that takes "flat" options (not to be deep extended) +// Fixes #9887 +function ajaxExtend( target, src ) { + var key, deep, + flatOptions = jQuery.ajaxSettings.flatOptions || {}; + + for ( key in src ) { + if ( src[ key ] !== undefined ) { + ( flatOptions[ key ] ? target : ( deep || ( deep = {} ) ) )[ key ] = src[ key ]; + } + } + if ( deep ) { + jQuery.extend( true, target, deep ); + } + + return target; +} + +/* Handles responses to an ajax request: + * - finds the right dataType (mediates between content-type and expected dataType) + * - returns the corresponding response + */ +function ajaxHandleResponses( s, jqXHR, responses ) { + + var ct, type, finalDataType, firstDataType, + contents = s.contents, + dataTypes = s.dataTypes; + + // Remove auto dataType and get content-type in the process + while ( dataTypes[ 0 ] === "*" ) { + dataTypes.shift(); + if ( ct === undefined ) { + ct = s.mimeType || jqXHR.getResponseHeader( "Content-Type" ); + } + } + + // Check if we're dealing with a known content-type + if ( ct ) { + for ( type in contents ) { + if ( contents[ type ] && contents[ type ].test( ct ) ) { + dataTypes.unshift( type ); + break; + } + } + } + + // Check to see if we have a response for the expected dataType + if ( dataTypes[ 0 ] in responses ) { + finalDataType = dataTypes[ 0 ]; + } else { + + // Try convertible dataTypes + for ( type in responses ) { + if ( !dataTypes[ 0 ] || s.converters[ type + " " + dataTypes[ 0 ] ] ) { + finalDataType = type; + break; + } + if ( !firstDataType ) { + firstDataType = type; + } + } + + // Or just use first one + finalDataType = finalDataType || firstDataType; + } + + // If we found a dataType + // We add the dataType to the list if needed + // and return the corresponding response + if ( finalDataType ) { + if ( finalDataType !== dataTypes[ 0 ] ) { + dataTypes.unshift( finalDataType ); + } + return responses[ finalDataType ]; + } +} + +/* Chain conversions given the request and the original response + * Also sets the responseXXX fields on the jqXHR instance + */ +function ajaxConvert( s, response, jqXHR, isSuccess ) { + var conv2, current, conv, tmp, prev, + converters = {}, + + // Work with a copy of dataTypes in case we need to modify it for conversion + dataTypes = s.dataTypes.slice(); + + // Create converters map with lowercased keys + if ( dataTypes[ 1 ] ) { + for ( conv in s.converters ) { + converters[ conv.toLowerCase() ] = s.converters[ conv ]; + } + } + + current = dataTypes.shift(); + + // Convert to each sequential dataType + while ( current ) { + + if ( s.responseFields[ current ] ) { + jqXHR[ s.responseFields[ current ] ] = response; + } + + // Apply the dataFilter if provided + if ( !prev && isSuccess && s.dataFilter ) { + response = s.dataFilter( response, s.dataType ); + } + + prev = current; + current = dataTypes.shift(); + + if ( current ) { + + // There's only work to do if current dataType is non-auto + if ( current === "*" ) { + + current = prev; + + // Convert response if prev dataType is non-auto and differs from current + } else if ( prev !== "*" && prev !== current ) { + + // Seek a direct converter + conv = converters[ prev + " " + current ] || converters[ "* " + current ]; + + // If none found, seek a pair + if ( !conv ) { + for ( conv2 in converters ) { + + // If conv2 outputs current + tmp = conv2.split( " " ); + if ( tmp[ 1 ] === current ) { + + 
// If prev can be converted to accepted input + conv = converters[ prev + " " + tmp[ 0 ] ] || + converters[ "* " + tmp[ 0 ] ]; + if ( conv ) { + + // Condense equivalence converters + if ( conv === true ) { + conv = converters[ conv2 ]; + + // Otherwise, insert the intermediate dataType + } else if ( converters[ conv2 ] !== true ) { + current = tmp[ 0 ]; + dataTypes.unshift( tmp[ 1 ] ); + } + break; + } + } + } + } + + // Apply converter (if not an equivalence) + if ( conv !== true ) { + + // Unless errors are allowed to bubble, catch and return them + if ( conv && s.throws ) { + response = conv( response ); + } else { + try { + response = conv( response ); + } catch ( e ) { + return { + state: "parsererror", + error: conv ? e : "No conversion from " + prev + " to " + current + }; + } + } + } + } + } + } + + return { state: "success", data: response }; +} + +jQuery.extend( { + + // Counter for holding the number of active queries + active: 0, + + // Last-Modified header cache for next request + lastModified: {}, + etag: {}, + + ajaxSettings: { + url: location.href, + type: "GET", + isLocal: rlocalProtocol.test( location.protocol ), + global: true, + processData: true, + async: true, + contentType: "application/x-www-form-urlencoded; charset=UTF-8", + + /* + timeout: 0, + data: null, + dataType: null, + username: null, + password: null, + cache: null, + throws: false, + traditional: false, + headers: {}, + */ + + accepts: { + "*": allTypes, + text: "text/plain", + html: "text/html", + xml: "application/xml, text/xml", + json: "application/json, text/javascript" + }, + + contents: { + xml: /\bxml\b/, + html: /\bhtml/, + json: /\bjson\b/ + }, + + responseFields: { + xml: "responseXML", + text: "responseText", + json: "responseJSON" + }, + + // Data converters + // Keys separate source (or catchall "*") and destination types with a single space + converters: { + + // Convert anything to text + "* text": String, + + // Text to html (true = no transformation) + "text html": true, + + // Evaluate text as a json expression + "text json": JSON.parse, + + // Parse text as xml + "text xml": jQuery.parseXML + }, + + // For options that shouldn't be deep extended: + // you can add your own custom options here if + // and when you create one that shouldn't be + // deep extended (see ajaxExtend) + flatOptions: { + url: true, + context: true + } + }, + + // Creates a full fledged settings object into target + // with both ajaxSettings and settings fields. + // If target is omitted, writes into ajaxSettings. + ajaxSetup: function( target, settings ) { + return settings ? 
+ + // Building a settings object + ajaxExtend( ajaxExtend( target, jQuery.ajaxSettings ), settings ) : + + // Extending ajaxSettings + ajaxExtend( jQuery.ajaxSettings, target ); + }, + + ajaxPrefilter: addToPrefiltersOrTransports( prefilters ), + ajaxTransport: addToPrefiltersOrTransports( transports ), + + // Main method + ajax: function( url, options ) { + + // If url is an object, simulate pre-1.5 signature + if ( typeof url === "object" ) { + options = url; + url = undefined; + } + + // Force options to be an object + options = options || {}; + + var transport, + + // URL without anti-cache param + cacheURL, + + // Response headers + responseHeadersString, + responseHeaders, + + // timeout handle + timeoutTimer, + + // Url cleanup var + urlAnchor, + + // Request state (becomes false upon send and true upon completion) + completed, + + // To know if global events are to be dispatched + fireGlobals, + + // Loop variable + i, + + // uncached part of the url + uncached, + + // Create the final options object + s = jQuery.ajaxSetup( {}, options ), + + // Callbacks context + callbackContext = s.context || s, + + // Context for global events is callbackContext if it is a DOM node or jQuery collection + globalEventContext = s.context && + ( callbackContext.nodeType || callbackContext.jquery ) ? + jQuery( callbackContext ) : + jQuery.event, + + // Deferreds + deferred = jQuery.Deferred(), + completeDeferred = jQuery.Callbacks( "once memory" ), + + // Status-dependent callbacks + statusCode = s.statusCode || {}, + + // Headers (they are sent all at once) + requestHeaders = {}, + requestHeadersNames = {}, + + // Default abort message + strAbort = "canceled", + + // Fake xhr + jqXHR = { + readyState: 0, + + // Builds headers hashtable if needed + getResponseHeader: function( key ) { + var match; + if ( completed ) { + if ( !responseHeaders ) { + responseHeaders = {}; + while ( ( match = rheaders.exec( responseHeadersString ) ) ) { + responseHeaders[ match[ 1 ].toLowerCase() ] = match[ 2 ]; + } + } + match = responseHeaders[ key.toLowerCase() ]; + } + return match == null ? null : match; + }, + + // Raw string + getAllResponseHeaders: function() { + return completed ? 
responseHeadersString : null; + }, + + // Caches the header + setRequestHeader: function( name, value ) { + if ( completed == null ) { + name = requestHeadersNames[ name.toLowerCase() ] = + requestHeadersNames[ name.toLowerCase() ] || name; + requestHeaders[ name ] = value; + } + return this; + }, + + // Overrides response content-type header + overrideMimeType: function( type ) { + if ( completed == null ) { + s.mimeType = type; + } + return this; + }, + + // Status-dependent callbacks + statusCode: function( map ) { + var code; + if ( map ) { + if ( completed ) { + + // Execute the appropriate callbacks + jqXHR.always( map[ jqXHR.status ] ); + } else { + + // Lazy-add the new callbacks in a way that preserves old ones + for ( code in map ) { + statusCode[ code ] = [ statusCode[ code ], map[ code ] ]; + } + } + } + return this; + }, + + // Cancel the request + abort: function( statusText ) { + var finalText = statusText || strAbort; + if ( transport ) { + transport.abort( finalText ); + } + done( 0, finalText ); + return this; + } + }; + + // Attach deferreds + deferred.promise( jqXHR ); + + // Add protocol if not provided (prefilters might expect it) + // Handle falsy url in the settings object (#10093: consistency with old signature) + // We also use the url parameter if available + s.url = ( ( url || s.url || location.href ) + "" ) + .replace( rprotocol, location.protocol + "//" ); + + // Alias method option to type as per ticket #12004 + s.type = options.method || options.type || s.method || s.type; + + // Extract dataTypes list + s.dataTypes = ( s.dataType || "*" ).toLowerCase().match( rnothtmlwhite ) || [ "" ]; + + // A cross-domain request is in order when the origin doesn't match the current origin. + if ( s.crossDomain == null ) { + urlAnchor = document.createElement( "a" ); + + // Support: IE <=8 - 11, Edge 12 - 13 + // IE throws exception on accessing the href property if url is malformed, + // e.g. 
http://example.com:80x/ + try { + urlAnchor.href = s.url; + + // Support: IE <=8 - 11 only + // Anchor's host property isn't correctly set when s.url is relative + urlAnchor.href = urlAnchor.href; + s.crossDomain = originAnchor.protocol + "//" + originAnchor.host !== + urlAnchor.protocol + "//" + urlAnchor.host; + } catch ( e ) { + + // If there is an error parsing the URL, assume it is crossDomain, + // it can be rejected by the transport if it is invalid + s.crossDomain = true; + } + } + + // Convert data if not already a string + if ( s.data && s.processData && typeof s.data !== "string" ) { + s.data = jQuery.param( s.data, s.traditional ); + } + + // Apply prefilters + inspectPrefiltersOrTransports( prefilters, s, options, jqXHR ); + + // If request was aborted inside a prefilter, stop there + if ( completed ) { + return jqXHR; + } + + // We can fire global events as of now if asked to + // Don't fire events if jQuery.event is undefined in an AMD-usage scenario (#15118) + fireGlobals = jQuery.event && s.global; + + // Watch for a new set of requests + if ( fireGlobals && jQuery.active++ === 0 ) { + jQuery.event.trigger( "ajaxStart" ); + } + + // Uppercase the type + s.type = s.type.toUpperCase(); + + // Determine if request has content + s.hasContent = !rnoContent.test( s.type ); + + // Save the URL in case we're toying with the If-Modified-Since + // and/or If-None-Match header later on + // Remove hash to simplify url manipulation + cacheURL = s.url.replace( rhash, "" ); + + // More options handling for requests with no content + if ( !s.hasContent ) { + + // Remember the hash so we can put it back + uncached = s.url.slice( cacheURL.length ); + + // If data is available, append data to url + if ( s.data ) { + cacheURL += ( rquery.test( cacheURL ) ? "&" : "?" ) + s.data; + + // #9682: remove data so that it's not used in an eventual retry + delete s.data; + } + + // Add or update anti-cache param if needed + if ( s.cache === false ) { + cacheURL = cacheURL.replace( rantiCache, "$1" ); + uncached = ( rquery.test( cacheURL ) ? "&" : "?" ) + "_=" + ( nonce++ ) + uncached; + } + + // Put hash and anti-cache on the URL that will be requested (gh-1732) + s.url = cacheURL + uncached; + + // Change '%20' to '+' if this is encoded form body content (gh-2658) + } else if ( s.data && s.processData && + ( s.contentType || "" ).indexOf( "application/x-www-form-urlencoded" ) === 0 ) { + s.data = s.data.replace( r20, "+" ); + } + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. + if ( s.ifModified ) { + if ( jQuery.lastModified[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-Modified-Since", jQuery.lastModified[ cacheURL ] ); + } + if ( jQuery.etag[ cacheURL ] ) { + jqXHR.setRequestHeader( "If-None-Match", jQuery.etag[ cacheURL ] ); + } + } + + // Set the correct header, if data is being sent + if ( s.data && s.hasContent && s.contentType !== false || options.contentType ) { + jqXHR.setRequestHeader( "Content-Type", s.contentType ); + } + + // Set the Accepts header for the server, depending on the dataType + jqXHR.setRequestHeader( + "Accept", + s.dataTypes[ 0 ] && s.accepts[ s.dataTypes[ 0 ] ] ? + s.accepts[ s.dataTypes[ 0 ] ] + + ( s.dataTypes[ 0 ] !== "*" ? 
", " + allTypes + "; q=0.01" : "" ) : + s.accepts[ "*" ] + ); + + // Check for headers option + for ( i in s.headers ) { + jqXHR.setRequestHeader( i, s.headers[ i ] ); + } + + // Allow custom headers/mimetypes and early abort + if ( s.beforeSend && + ( s.beforeSend.call( callbackContext, jqXHR, s ) === false || completed ) ) { + + // Abort if not done already and return + return jqXHR.abort(); + } + + // Aborting is no longer a cancellation + strAbort = "abort"; + + // Install callbacks on deferreds + completeDeferred.add( s.complete ); + jqXHR.done( s.success ); + jqXHR.fail( s.error ); + + // Get transport + transport = inspectPrefiltersOrTransports( transports, s, options, jqXHR ); + + // If no transport, we auto-abort + if ( !transport ) { + done( -1, "No Transport" ); + } else { + jqXHR.readyState = 1; + + // Send global event + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxSend", [ jqXHR, s ] ); + } + + // If request was aborted inside ajaxSend, stop there + if ( completed ) { + return jqXHR; + } + + // Timeout + if ( s.async && s.timeout > 0 ) { + timeoutTimer = window.setTimeout( function() { + jqXHR.abort( "timeout" ); + }, s.timeout ); + } + + try { + completed = false; + transport.send( requestHeaders, done ); + } catch ( e ) { + + // Rethrow post-completion exceptions + if ( completed ) { + throw e; + } + + // Propagate others as results + done( -1, e ); + } + } + + // Callback for when everything is done + function done( status, nativeStatusText, responses, headers ) { + var isSuccess, success, error, response, modified, + statusText = nativeStatusText; + + // Ignore repeat invocations + if ( completed ) { + return; + } + + completed = true; + + // Clear timeout if it exists + if ( timeoutTimer ) { + window.clearTimeout( timeoutTimer ); + } + + // Dereference transport for early garbage collection + // (no matter how long the jqXHR object will be used) + transport = undefined; + + // Cache response headers + responseHeadersString = headers || ""; + + // Set readyState + jqXHR.readyState = status > 0 ? 4 : 0; + + // Determine if successful + isSuccess = status >= 200 && status < 300 || status === 304; + + // Get response data + if ( responses ) { + response = ajaxHandleResponses( s, jqXHR, responses ); + } + + // Convert no matter what (that way responseXXX fields are always set) + response = ajaxConvert( s, response, jqXHR, isSuccess ); + + // If successful, handle type chaining + if ( isSuccess ) { + + // Set the If-Modified-Since and/or If-None-Match header, if in ifModified mode. 
+ if ( s.ifModified ) { + modified = jqXHR.getResponseHeader( "Last-Modified" ); + if ( modified ) { + jQuery.lastModified[ cacheURL ] = modified; + } + modified = jqXHR.getResponseHeader( "etag" ); + if ( modified ) { + jQuery.etag[ cacheURL ] = modified; + } + } + + // if no content + if ( status === 204 || s.type === "HEAD" ) { + statusText = "nocontent"; + + // if not modified + } else if ( status === 304 ) { + statusText = "notmodified"; + + // If we have data, let's convert it + } else { + statusText = response.state; + success = response.data; + error = response.error; + isSuccess = !error; + } + } else { + + // Extract error from statusText and normalize for non-aborts + error = statusText; + if ( status || !statusText ) { + statusText = "error"; + if ( status < 0 ) { + status = 0; + } + } + } + + // Set data for the fake xhr object + jqXHR.status = status; + jqXHR.statusText = ( nativeStatusText || statusText ) + ""; + + // Success/Error + if ( isSuccess ) { + deferred.resolveWith( callbackContext, [ success, statusText, jqXHR ] ); + } else { + deferred.rejectWith( callbackContext, [ jqXHR, statusText, error ] ); + } + + // Status-dependent callbacks + jqXHR.statusCode( statusCode ); + statusCode = undefined; + + if ( fireGlobals ) { + globalEventContext.trigger( isSuccess ? "ajaxSuccess" : "ajaxError", + [ jqXHR, s, isSuccess ? success : error ] ); + } + + // Complete + completeDeferred.fireWith( callbackContext, [ jqXHR, statusText ] ); + + if ( fireGlobals ) { + globalEventContext.trigger( "ajaxComplete", [ jqXHR, s ] ); + + // Handle the global AJAX counter + if ( !( --jQuery.active ) ) { + jQuery.event.trigger( "ajaxStop" ); + } + } + } + + return jqXHR; + }, + + getJSON: function( url, data, callback ) { + return jQuery.get( url, data, callback, "json" ); + }, + + getScript: function( url, callback ) { + return jQuery.get( url, undefined, callback, "script" ); + } +} ); + +jQuery.each( [ "get", "post" ], function( i, method ) { + jQuery[ method ] = function( url, data, callback, type ) { + + // Shift arguments if data argument was omitted + if ( jQuery.isFunction( data ) ) { + type = type || callback; + callback = data; + data = undefined; + } + + // The url can be an options object (which then must have .url) + return jQuery.ajax( jQuery.extend( { + url: url, + type: method, + dataType: type, + data: data, + success: callback + }, jQuery.isPlainObject( url ) && url ) ); + }; +} ); + + +jQuery._evalUrl = function( url ) { + return jQuery.ajax( { + url: url, + + // Make this explicit, since user can override this through ajaxSetup (#11264) + type: "GET", + dataType: "script", + cache: true, + async: false, + global: false, + "throws": true + } ); +}; + + +jQuery.fn.extend( { + wrapAll: function( html ) { + var wrap; + + if ( this[ 0 ] ) { + if ( jQuery.isFunction( html ) ) { + html = html.call( this[ 0 ] ); + } + + // The elements to wrap the target around + wrap = jQuery( html, this[ 0 ].ownerDocument ).eq( 0 ).clone( true ); + + if ( this[ 0 ].parentNode ) { + wrap.insertBefore( this[ 0 ] ); + } + + wrap.map( function() { + var elem = this; + + while ( elem.firstElementChild ) { + elem = elem.firstElementChild; + } + + return elem; + } ).append( this ); + } + + return this; + }, + + wrapInner: function( html ) { + if ( jQuery.isFunction( html ) ) { + return this.each( function( i ) { + jQuery( this ).wrapInner( html.call( this, i ) ); + } ); + } + + return this.each( function() { + var self = jQuery( this ), + contents = self.contents(); + + if ( contents.length ) { + 
contents.wrapAll( html ); + + } else { + self.append( html ); + } + } ); + }, + + wrap: function( html ) { + var isFunction = jQuery.isFunction( html ); + + return this.each( function( i ) { + jQuery( this ).wrapAll( isFunction ? html.call( this, i ) : html ); + } ); + }, + + unwrap: function( selector ) { + this.parent( selector ).not( "body" ).each( function() { + jQuery( this ).replaceWith( this.childNodes ); + } ); + return this; + } +} ); + + +jQuery.expr.pseudos.hidden = function( elem ) { + return !jQuery.expr.pseudos.visible( elem ); +}; +jQuery.expr.pseudos.visible = function( elem ) { + return !!( elem.offsetWidth || elem.offsetHeight || elem.getClientRects().length ); +}; + + + + +jQuery.ajaxSettings.xhr = function() { + try { + return new window.XMLHttpRequest(); + } catch ( e ) {} +}; + +var xhrSuccessStatus = { + + // File protocol always yields status code 0, assume 200 + 0: 200, + + // Support: IE <=9 only + // #1450: sometimes IE returns 1223 when it should be 204 + 1223: 204 + }, + xhrSupported = jQuery.ajaxSettings.xhr(); + +support.cors = !!xhrSupported && ( "withCredentials" in xhrSupported ); +support.ajax = xhrSupported = !!xhrSupported; + +jQuery.ajaxTransport( function( options ) { + var callback, errorCallback; + + // Cross domain only allowed if supported through XMLHttpRequest + if ( support.cors || xhrSupported && !options.crossDomain ) { + return { + send: function( headers, complete ) { + var i, + xhr = options.xhr(); + + xhr.open( + options.type, + options.url, + options.async, + options.username, + options.password + ); + + // Apply custom fields if provided + if ( options.xhrFields ) { + for ( i in options.xhrFields ) { + xhr[ i ] = options.xhrFields[ i ]; + } + } + + // Override mime type if needed + if ( options.mimeType && xhr.overrideMimeType ) { + xhr.overrideMimeType( options.mimeType ); + } + + // X-Requested-With header + // For cross-domain requests, seeing as conditions for a preflight are + // akin to a jigsaw puzzle, we simply never set it to be sure. + // (it can always be set on a per-request basis or even using ajaxSetup) + // For same-domain requests, won't change header if already provided. + if ( !options.crossDomain && !headers[ "X-Requested-With" ] ) { + headers[ "X-Requested-With" ] = "XMLHttpRequest"; + } + + // Set headers + for ( i in headers ) { + xhr.setRequestHeader( i, headers[ i ] ); + } + + // Callback + callback = function( type ) { + return function() { + if ( callback ) { + callback = errorCallback = xhr.onload = + xhr.onerror = xhr.onabort = xhr.onreadystatechange = null; + + if ( type === "abort" ) { + xhr.abort(); + } else if ( type === "error" ) { + + // Support: IE <=9 only + // On a manual native abort, IE9 throws + // errors on any property access that is not readyState + if ( typeof xhr.status !== "number" ) { + complete( 0, "error" ); + } else { + complete( + + // File: protocol always yields status 0; see #8605, #14207 + xhr.status, + xhr.statusText + ); + } + } else { + complete( + xhrSuccessStatus[ xhr.status ] || xhr.status, + xhr.statusText, + + // Support: IE <=9 only + // IE9 has no XHR2 but throws on binary (trac-11426) + // For XHR2 non-text, let the caller handle it (gh-2498) + ( xhr.responseType || "text" ) !== "text" || + typeof xhr.responseText !== "string" ? 
+ { binary: xhr.response } : + { text: xhr.responseText }, + xhr.getAllResponseHeaders() + ); + } + } + }; + }; + + // Listen to events + xhr.onload = callback(); + errorCallback = xhr.onerror = callback( "error" ); + + // Support: IE 9 only + // Use onreadystatechange to replace onabort + // to handle uncaught aborts + if ( xhr.onabort !== undefined ) { + xhr.onabort = errorCallback; + } else { + xhr.onreadystatechange = function() { + + // Check readyState before timeout as it changes + if ( xhr.readyState === 4 ) { + + // Allow onerror to be called first, + // but that will not handle a native abort + // Also, save errorCallback to a variable + // as xhr.onerror cannot be accessed + window.setTimeout( function() { + if ( callback ) { + errorCallback(); + } + } ); + } + }; + } + + // Create the abort callback + callback = callback( "abort" ); + + try { + + // Do send the request (this may raise an exception) + xhr.send( options.hasContent && options.data || null ); + } catch ( e ) { + + // #14683: Only rethrow if this hasn't been notified as an error yet + if ( callback ) { + throw e; + } + } + }, + + abort: function() { + if ( callback ) { + callback(); + } + } + }; + } +} ); + + + + +// Prevent auto-execution of scripts when no explicit dataType was provided (See gh-2432) +jQuery.ajaxPrefilter( function( s ) { + if ( s.crossDomain ) { + s.contents.script = false; + } +} ); + +// Install script dataType +jQuery.ajaxSetup( { + accepts: { + script: "text/javascript, application/javascript, " + + "application/ecmascript, application/x-ecmascript" + }, + contents: { + script: /\b(?:java|ecma)script\b/ + }, + converters: { + "text script": function( text ) { + jQuery.globalEval( text ); + return text; + } + } +} ); + +// Handle cache's special case and crossDomain +jQuery.ajaxPrefilter( "script", function( s ) { + if ( s.cache === undefined ) { + s.cache = false; + } + if ( s.crossDomain ) { + s.type = "GET"; + } +} ); + +// Bind script tag hack transport +jQuery.ajaxTransport( "script", function( s ) { + + // This transport only deals with cross domain requests + if ( s.crossDomain ) { + var script, callback; + return { + send: function( _, complete ) { + script = jQuery( " + + + + + + + + + + + + + + + + +

3. Entity Create/Update/Append

[Figure: flow-1.png]

The figure shows the operational flow of entity create/update/append in the Scorpio Broker system. The marked steps are interpreted as follows:

  1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to create/update/append an entity in the form of an HTTP POST request.
  2. The request enters the service API gateway.

 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming request needs to be forwarded) from the discovery & registry service.

 2.2. The service API gateway forwards the HTTP request to the Entity Manager micro-service.

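For illustration, a step-1 request could look like the minimal sketch below. It assumes the broker's NGSI-LD API is reachable at http://localhost:9090 and uses the standard NGSI-LD entity-creation endpoint (POST /ngsi-ld/v1/entities/) with an application/ld+json payload; the entity id, type, attribute, and host are made-up example values, not part of the flow description above.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CreateEntityExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical NGSI-LD entity payload; id, type, and attribute are illustrative only.
        String payload = "{"
                + "\"id\": \"urn:ngsi-ld:Vehicle:A4567\","
                + "\"type\": \"Vehicle\","
                + "\"speed\": {\"type\": \"Property\", \"value\": 55},"
                + "\"@context\": [\"https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld\"]"
                + "}";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:9090/ngsi-ld/v1/entities/")) // assumed gateway address
                .header("Content-Type", "application/ld+json")
                .POST(HttpRequest.BodyPublishers.ofString(payload))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // A 201 Created status is expected on success; an "already exists" error is reported otherwise.
        System.out.println(response.statusCode());
    }
}
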
  3. The Entity Manager internally calls an LDContext resolver service to resolve the payload with the context sent along with the POST request. Once the payload is resolved with the context, it fetches the previously stored data/entities from the topic "Entities" and validates the requested entity against the existing stored entities based on the EntityID.
  • If the entity is already present (or already has all the attributes and values that are requested to be modified), an error message ("already exists") is returned and no further step is executed.
  • Otherwise, it moves on for further processing.
  4. The Entity Manager (EM) then publishes/stores the data and sends the response to the requester for the requested entity (E1) create operation, as follows:

 4.1. EM publishes E1 to Kafka under the topic "Entities".

 4.2. EM also publishes E1 to Kafka under the topic "Entity_Create/Update/Append".

 4.3. Upon a successful publish operation, EM sends the response back.

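As a rough sketch of steps 4.1 and 4.2, the snippet below uses the plain Apache Kafka Java client to publish the resolved entity payload. The topic names, the choice of the entity id as the record key, and the broker address are assumptions for illustration only; in particular, "Entity_Create/Update/Append" is read here as a per-operation topic and spelled ENTITY_CREATE because Kafka topic names cannot contain a slash.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

public class EntityPublishSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "kafka:9092"); // assumed Kafka bootstrap address
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        String entityId = "urn:ngsi-ld:Vehicle:A4567";            // illustrative entity id
        String resolvedEntity = "{ \"id\": \"urn:ngsi-ld:Vehicle:A4567\", \"type\": \"Vehicle\" }"; // placeholder for the context-resolved payload

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // 4.1: record the change on the common entities topic (assumed name ENTITIES)
            producer.send(new ProducerRecord<>("ENTITIES", entityId, resolvedEntity));
            // 4.2: record the create operation on the operation-specific topic (assumed name ENTITY_CREATE)
            producer.send(new ProducerRecord<>("ENTITY_CREATE", entityId, resolvedEntity));
        }
        // 4.3: once both publishes succeed, the HTTP response can be returned to the caller.
    }
}
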
Note: The "Entities" topic saves all changes made to an entity over a period of time by any of the create/update/append operations. The "Entity_Create/Update/Append" topic (specific to the CREATE operation), however, stores only the data changes of the entity create operation. Having different topics per operation avoids ambiguity among the differing requirements of different consumers. For example, the subscription manager may need to subscribe to the whole entity, to a set of specific attributes, or only to value changes of certain attributes. Managing all these requirements would be hard without a separate topic per operation, whereas with per-operation topics it becomes straightforward to provide the direct delta change for a given entity at any point in time. Putting all operations' data into a single topic therefore cannot offer the required decoupling, simplification, and flexibility to subscribe/manage at the level of operations, data, or delta data. That is why having separate topics per operation, plus one common topic that records all changes of all operations to a given entity (needed to validate the whole entity's changes over a period of time), is the favorable design choice.
The context for the given payload is stored by the LDContext resolver service in the Kafka topic named AtContext.

  5. When a message gets published to a Kafka topic, the consumers that have subscribed to (or are listening on) that topic are notified. In this case, the consumers of the "Entity_Create/Update/Append" topic, upon receiving the notification, do the following:

 5.1. The Subscription Manager, when it gets a notification for a related entity, validates the notification against the current entity and checks whether a notification needs to be sent accordingly.

 5.2. The Storage Manager, upon notification from the Entities & CR topics, triggers the further operations to store/modify the entity-related changes in the DB tables.

+
    +
  1. Now the Entity Manager also prepares the registration of the entity data model in the Context Registry. It performs the following further functions to achieve this:
  2. +
+

 6.1. It prepares the csource registration payload (as per NGSI-LD spec section C.3) from the entity payload and fills the necessary fields (like id, endpoint as broker IP, location, etc.). Afterward, the Entity Manager writes this csource payload to the CR topic.

+

 6.2. The CR Manager listens to this CR topic and thereby learns that an entity has been registered.

+

 6.3. The CR Manager writes the updates, if any, into the CSource topic.

+
+
+

4. Entity Subscription¶

+
+_images/flow-2.png +
+

The figure shows the operational flow of entity subscription in the Scorpio Broker system. The following is the interpretation of the marked steps:

+
    +
  1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to subscribe for an entity (or attribute) in the form of the HTTP POST request.
  2. +
  3. The request enters the service API gateway.
  4. +
+

 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service.

+

 2.2. The service API gateway forwards the HTTP request to the Subscription Manager micro-service.

+
    +
  1. The Subscription Manager internally calls an LDContext resolver service to resolve the payload with the given context sent along with the POST request. The Subscription Manager then fetches the previously stored data/entities from the topic "Subscription" and validates the requested entity against the existing stored values based on EntityID.
  2. +
+
    +
  • If the data for the current request is already present, an error message is returned to the requester and no further step is executed.
  • +
  • Otherwise, it moves on for further processing.
  • +
+
    +
  1. The Subscription Manager (SM) publishes/stores the subscription and sends the response to the requestor for the requested operation, as follows:
  2. +
+

 4.1. SM publishes the subscription S(E1) to Kafka under the topic "Subscription".

+

 4.2. SM starts the notification functionality and starts/keeps listening for the related subscription on:

+

 4.2.1. The entity-related topics "Create/Update/Append".

+

 4.2.2. The context-source-related topic, i.e. the "CSource" topic, for any future registration of context sources. This avoids the need to query the CR explicitly for csources for already subscribed items/entities.

+

 4.2.3. The CRQueryResult topic, for gathering the results of specific raised queries, if any.

+

 4.2.4. When the condition of a subscription request is met, SM notifies the given endpoint with the subscribed entity. It also performs remote subscriptions to the context sources provided by the context registry.

+

 4.3. Upon a successful publish operation, SM sends the response back.

+

5. SM may optionally raise a query to the CR by posting to the CRQuery topic for each subscription request received (only once per subscription request). When a message gets published to the CRQuery topic, the CR, which has subscribed to or is listening on this topic, is notified. The CR then does the following:

+

 5.1. The CR receives the notification and determines the list of context sources for which this subscription may be valid by pulling data from the CR topic and/or the CSourceSub topic.

+

 5.2. The CR publishes the list of context sources to the CRQueryResult topic, on which the SM has already started listening, and steps 4.2.3 and 4.2.4 are repeated.

+

Note: The CSource topic contains the list of context sources registered directly through the csource registration interface. The CR topic contains the map of the entity data model (maintained per entity ID) created based on an entity creation request (through the IoT broker interface) and/or the provider/data source of that entity model.
Limitation: In the first release of Scorpio Broker, the REST-based csource query is not supported; instead, the csource query is based on the internal messaging queue mechanism. In the future, both the message-queue-based and REST-based csource queries will be supported.

+
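For reference, the kind of subscription handled in this flow is created by POSTing an NGSI-LD subscription payload to the broker. The sketch below uses java.net.http; the broker URL, entity type, watched attribute, and notification endpoint are assumed example values, and the standard NGSI-LD subscriptions path is used:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SubscriptionRequestSketch {
        public static void main(String[] args) throws Exception {
            // Illustrative NGSI-LD subscription payload; all values are example data.
            String payload = "{"
                    + "\"id\": \"urn:ngsi-ld:Subscription:1\","
                    + "\"type\": \"Subscription\","
                    + "\"entities\": [{\"type\": \"Vehicle\"}],"
                    + "\"watchedAttributes\": [\"speed\"],"
                    + "\"notification\": {\"endpoint\": {\"uri\": \"http://my.endpoint.org/notify\"}}"
                    + "}";

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:9090/ngsi-ld/v1/subscriptions/")) // assumed local gateway URL
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString(payload))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode()); // 201 Created is expected on success
        }
    }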
+
+

5. Query¶

+
+_images/flow-3.png +
+

The figure shows the operational flow of the query operation in the Scorpio Broker system. The following is the interpretation of the marked steps:

+
    +
  1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to query for entities/an entity/attribute in the form of an HTTP GET request.
  2. +
  3. The request enters the service API gateway.
  4. +
+

 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service.

+

 2.2. The service API gateway forwards the HTTP request to the Query Manager micro-service.

+
    +
  1. The Query Manager now fetches the previously stored data/entities from the topic "Entities".
  2. +
+
    +
  • If the query is for all entities, or for specific entities by id and/or attribute, it is served directly by the Query Manager based on the Kafka Entities topic data, without involving the Storage Manager. In short, simpler queries (e.g. non-geo queries, or queries without regular expressions) associated with an entity or entities can be served directly. In this case, the response is sent back and processing jumps to step 7.2.
  • +
  • For complex queries, the Query Manager takes help from the Storage Manager, as mentioned in the following steps.
  • +
+
    +
  1. The Query Manager (in the case of complex queries) publishes the query (embedding the query and other metadata in the message) into the Query topic, which the Storage Manager is listening to.
  2. +
  3. The storage manager gets the notification for the requested query and starts processing the query over the DB data and builds the query response.
  4. +
  5. The Storage Manager publishes the query response to the Query topic, which the Query Manager is listening to.
  6. +
  7. The QM receives the notification from the QueryResult topic.
  8. +
+

 7.1.  It sends the HTTP response back to the API gateway.

+

 7.2. The API gateway sends the response back to the end-user/requestor.

+
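A simple (non-geo, non-regex) query of the kind served directly from the Entities topic data corresponds to an HTTP GET like the one sketched below (java.net.http; the broker URL and entity type are assumed example values, using the standard NGSI-LD entities path):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class EntityQuerySketch {
        public static void main(String[] args) throws Exception {
            // Query all entities of an illustrative type; the broker URL is an assumption for a local setup.
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:9090/ngsi-ld/v1/entities/?type=Vehicle"))
                    .header("Accept", "application/ld+json")
                    .GET()
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body()); // JSON-LD array of matching entities
        }
    }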
+
+

6. Context Source Registration¶

+
+_images/flow-4.png +
+

The figure shows the operational flow of context source registration in the Scorpio Broker system. The following is the interpretation of the marked steps:

+
    +
  1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to register a context source (csource) in the form of an HTTP POST request.
  2. +
  3. The request enters the service API gateway.
  4. +
+

 a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service.

+

 b.The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.

+
    +
  1. The CR Manager now fetches the previously stored data/entities from the topic "CSource".
  2. +
+

 a. If an entry for the requested csource is already present, it exits the processing and informs the requester of this. If it is not present, it continues with further processing.

+

 b. Now the CR Manager performs some basic validation to check whether this is a valid request with a valid payload.

+

 c. The CR Manager now writes this payload into the CSource topic.

+
    +
  1. The Storage Manager keeps listening to the CSource topic and, for any new entry written, performs the corresponding operation in the database.
  2. +
  3. The CR Manager prepares the response for the csource request and
  4. +
+

 5.1 sends the HTTP response back to the API gateway.

+

 5.2 API gateway sends back the response to the end-user/requester.

+

Note: For a Context Source Update request, only the payload changes; in step 3, upon validation against the existing entry, it does not exit but instead updates the retrieved entry and writes it back into Kafka. The rest of the flow remains mostly the same.

+
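For illustration, a context source registration of the kind processed in this flow could be created with a request like the following sketch (java.net.http; the registration id, entity type, provider endpoint, and broker URL are assumed example values, using the standard NGSI-LD csourceRegistrations path):

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class CSourceRegistrationSketch {
        public static void main(String[] args) throws Exception {
            // Illustrative csource registration payload (cf. the csource registration data model in the NGSI-LD spec).
            String payload = "{"
                    + "\"id\": \"urn:ngsi-ld:ContextSourceRegistration:1\","
                    + "\"type\": \"ContextSourceRegistration\","
                    + "\"information\": [{\"entities\": [{\"type\": \"Vehicle\"}]}],"
                    + "\"endpoint\": \"http://my.csource.org:1026\""
                    + "}";

            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create("http://localhost:9090/ngsi-ld/v1/csourceRegistrations/")) // assumed local gateway URL
                    .header("Content-Type", "application/json")
                    .POST(HttpRequest.BodyPublishers.ofString(payload))
                    .build();

            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(request, HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode());
        }
    }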
+
+

7. Context Source Subscription¶

+
+_images/flow-5.png +
+

The figure "Scorpio Broker Context Source Subscription Flow" shows the operational flow of context source subscriptions in the Scorpio Broker system. The following is the interpretation of the marked steps:

+
    +
  1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to subscribe for csource updates in the form of an HTTP POST request.
  2. +
  3. The request enters the service API gateway.
  4. +
+

 a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service.

+

 b.The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.

+
    +
  1. The CR Manager now fetches the previously stored data/entities from the topic "CSourceSub".
  2. +
+

 a. Now the CR manager performs some basic validation to check if this is a valid request with the valid payload.

+

 b. If an entry for the requested csource subscription is already present, it exits the processing and informs the requester of this. If it is not present, it continues with further processing.

+

 c. The CR Manager now writes this payload into the CSourceSub topic.

+

 d. In parallel, it also starts an independent thread that listens to the CSource topic for the requested subscription; when the condition is met, a notification is sent to the registered endpoint provided in the subscription payload.

+
    +
  1. The Storage Manager keeps listening to the CSourceSub topic and, for any new/updated entry written, performs the corresponding operation in the database.
  2. +
  3. The CR Manager prepares the response for the csource subscription request and
  4. +
+

 5.1 sends the HTTP response back to the API gateway.

+

 5.2 API gateway sends back the response to the end-user/requester.

+
+
+

8. History¶

+
+_images/flow-6.png +
+

The figure shows the operational flow of the history (temporal entity) operation in the Scorpio Broker system. The following is the interpretation of the marked steps:

+
    +
  1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to the history manager in the form of an HTTP POST request.
  2. +
  3. The request enters the service API gateway.
  4. +
+

 a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service.

+

 b.The service API gateway forwards the HTTP request to the History Manager micro-service.

+
    +
  1. The History Manager now executes the EVA algorithm approach on the received payload and pushes the payload attributes to the Kafka topic "TEMPORALENTITY".
  2. +
+

Note: The History Manager must walk through each attribute at the root level of the object (except @id and @type). Inside each attribute, it must walk through each instance (array element). Then, it sends the current object to the Kafka topic TEMPORALENTITY (a sketch of this walk is given at the end of this section).

+
    +
  1. The History Manager keeps listening to the "TEMPORALENTITY" topic and, for any new entry, performs the corresponding operation in the database.
  2. +
+
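A minimal sketch of the attribute walk described in the note above (assuming the expanded payload has already been parsed into a Map, and assuming Spring for Apache Kafka; the class and method names are illustrative, not Scorpio's actual code):

    import java.util.List;
    import java.util.Map;
    import org.springframework.kafka.core.KafkaTemplate;

    public class TemporalEntityPushSketch {

        private final KafkaTemplate<String, Object> kafkaTemplate;

        public TemporalEntityPushSketch(KafkaTemplate<String, Object> kafkaTemplate) {
            this.kafkaTemplate = kafkaTemplate;
        }

        // Walk every root-level attribute except @id and @type, visit each instance,
        // then send the object to the TEMPORALENTITY topic keyed by the entity id.
        public void push(Map<String, Object> entity) {
            for (Map.Entry<String, Object> attribute : entity.entrySet()) {
                String name = attribute.getKey();
                if ("@id".equals(name) || "@type".equals(name)) {
                    continue; // id and type are not temporal attributes
                }
                if (attribute.getValue() instanceof List) {
                    for (Object instance : (List<?>) attribute.getValue()) {
                        // Each instance (array element) of the attribute is processed here,
                        // e.g. stamped with temporal metadata before publishing.
                    }
                }
            }
            kafkaTemplate.send("TEMPORALENTITY", (String) entity.get("@id"), entity);
        }
    }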
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/contributionGuideline.html b/scorpio-broker/docs/en/source/_build/html/contributionGuideline.html new file mode 100644 index 0000000000000000000000000000000000000000..4830cd05d205a0b8cac7356e79713c0143b7a98d --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/contributionGuideline.html @@ -0,0 +1,151 @@ + + + + + + + + 9. Branch Management Guidelines — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9. Branch Management Guidelines¶

+
+_images/gitGuideline.jpg +
+

The community can have two main branches with an infinite lifetime:

+
    +
  1. Master branch: This is a highly stable branch that is always production-ready and contains the last release version of source code in production.
  2. +
  3. Development branch: Derived from the master branch, the development branch serves as a branch for integrating different features planned for an upcoming release. This branch may or may not be as stable as the master branch. It is where developers collaborate and merge feature branches. All of the changes should be merged back into the master somehow and then tagged with a release number.
  4. +
+

Apart from those two primary branches, there are other branches in the workflow:

+
    +
  • Feature Branch: Forked from the development branch for feature development i.e. enhancement or documentation. Merged back to the development branch after feature development or enhancement implementation.
  • +
  • Bug Branch: Branches off from the development branch. Merged back to the development branch after bug fixing.
  • +
  • Hotfix branch: Hotfix branches are created from the master branch. They are used when the current production release is running live and causing trouble due to a severe bug, while the changes on development are not yet stable. We then branch off a hotfix branch and start fixing the problem. This should be the rarest occasion, reserved for critical bugs only.
  • +
+

Note: Only NLE and NECTI members have the privilege to create and merge the Hotfix branch.

+ + +++++ + + + + + + + + + + + + + + + + + + + + +
Branch naming convention¶
Branch | Branches naming guideline | Remarks
Feature branches | Must branch from: development. Must merge back into: development. Branch naming convention: feature-feature_id | feature_id is the Github issue id from https://github.com/ScorpioBroker/ScorpioBroker/issues
Bug Branches | Must branch from: development. Must merge back into: development. Branch naming convention: bug-bug_id | bug_id is the Github issue id from https://github.com/ScorpioBroker/ScorpioBroker/issues
Hotfix Branches | Must branch from: master branch. Must merge back into: master branch. Branch naming convention: hotfix-bug number. | Bug number is the Github issue id from https://github.com/ScorpioBroker/ScorpioBroker/issues
+
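For example, an enhancement tracked as GitHub issue 123 would be developed on a branch named feature-123, a fix for issue 456 on bug-456, and an urgent production fix for issue 789 on hotfix-789 (the issue numbers here are illustrative).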
+

9.1. Permissions to the branches:¶

+
    +
  • Master - We are very strict: only NLE members and privileged members of NECTI can merge on the master branch and accept pull requests. Pull requests to master can be raised only by NECTI or NLE members.
  • +
  • Development - Any community member can raise a pull request to the development branch, but it should be reviewed by NLE or NECTI members. Development branch commits are moved to the master branch only when all the test cases written under the NGSI-LD test suites run successfully.
  • +
+
+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/docker.html b/scorpio-broker/docs/en/source/_build/html/docker.html new file mode 100644 index 0000000000000000000000000000000000000000..fd19e88052f44e88419c3852e8074ba5c18f028c --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/docker.html @@ -0,0 +1,156 @@ + + + + + + + + 11. Getting a docker container — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

11. Getting a docker container¶

+

The current Maven build supports two types of docker container generation, using Maven profiles to trigger them.

+

The first profile is called 'docker' and can be invoked like this:

+

+
+
+

    mvn clean package -DskipTests -Pdocker

+

This will generate individual docker containers for each microservice. The corresponding docker-compose file is docker-compose-dist.yml.

+

The second profile is called 'docker-aaio' (for almost all in one). This will generate one single docker container for all components of the broker except the Kafka message bus and the Postgres database.

+

To get the aaio version, run the Maven build like this:

+

+
+
+

    mvn clean package -DskipTests -Pdocker-aaio

+

The corresponding docker-compose file is docker-compose-aaio.yml

+
+

11.1. General remark for the Kafka docker image and docker-compose¶

+

The Kafka docker container requires you to provide the environment variable KAFKA_ADVERTISED_HOST_NAME. This has to be changed in the docker-compose files to match your docker host IP. You can use 127.0.0.1; however, this will prevent you from running Kafka in cluster mode.

+

For further details please refer to https://hub.docker.com/r/wurstmeister/kafka

+
+
+

11.2. Running docker build outside of Maven¶

+

If you want to separate the build of the jars from the docker build, you need to provide certain variables to docker.
The following list shows all the variables and their intended values if you run docker build from the root directory (an example invocation is sketched after the list).

+

 - BUILD_DIR_ACS = Core/AtContextServer

+

 - BUILD_DIR_SCS = SpringCloudModules/config-server

+

 - BUILD_DIR_SES = SpringCloudModules/eureka

+

 - BUILD_DIR_SGW = SpringCloudModules/gateway

+

 - BUILD_DIR_HMG = History/HistoryManager

+

 - BUILD_DIR_QMG = Core/QueryManager

+

 - BUILD_DIR_RMG = Registry/RegistryManager

+

 - BUILD_DIR_EMG = Core/EntityManager

+

 - BUILD_DIR_STRMG = Storage/StorageManager

+

 - BUILD_DIR_SUBMG = Core/SubscriptionManager

+

 - JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar

+

 - JAR_FILE_BUILD_SCS = config-server-${project.version}.jar

+

 - JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar

+

 - JAR_FILE_BUILD_SGW = gateway-${project.version}.jar

+

 - JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar

+

 - JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar

+

 - JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar

+

 - JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar

+

 - JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar

+

 - JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar

+

 - JAR_FILE_RUN_ACS = AtContextServer.jar

+

 - JAR_FILE_RUN_SCS = config-server.jar

+

 - JAR_FILE_RUN_SES = eureka-server.jar

+

 - JAR_FILE_RUN_SGW = gateway.jar

+

 - JAR_FILE_RUN_HMG = HistoryManager.jar

+

 - JAR_FILE_RUN_QMG = QueryManager.jar

+

 - JAR_FILE_RUN_RMG = RegistryManager.jar

+

 - JAR_FILE_RUN_EMG = EntityManager.jar

+

 - JAR_FILE_RUN_STRMG = StorageManager.jar

+

 - JAR_FILE_RUN_SUBMG = SubscriptionManager.jar

+
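As an illustration only (assuming the Dockerfile at the repository root consumes the ARGs listed above; the chosen service, the jar version placeholder, and the image tag are arbitrary), a single-service build could look like:

    docker build --build-arg BUILD_DIR_EMG=Core/EntityManager --build-arg JAR_FILE_BUILD_EMG=EntityManager-<version>.jar --build-arg JAR_FILE_RUN_EMG=EntityManager.jar -t scorpio/entity-manager .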
+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/errorHandling.html b/scorpio-broker/docs/en/source/_build/html/errorHandling.html new file mode 100644 index 0000000000000000000000000000000000000000..34ee9c0a8e02df7ac180a6741a5644a056f64fa7 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/errorHandling.html @@ -0,0 +1,196 @@ + + + + + + + + 4. Error Handler — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

4. Error Handler¶

+

This section will provide info on the error handling mechanism for the Scorpio Broker system.

+

Listed below are the events of the system

+ + ++++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Error Handling¶
S.No. | Operation/Event | Scenario Description | Responsible Module | Error Code/Response | Action
1 | InvalidRequest | The request associated to the operation is syntactically invalid or includes wrong content | REST Controller | HTTP 400 | Log the error & notify the requestor
2 | BadRequestData | The request includes input data which does not meet the requirements of the operation | REST Controller | HTTP 400 | Log the error & notify the requestor
3 | AlreadyExists | The referred element already exists | REST Controller | HTTP 409 | Log the error & notify the requestor
4 | OperationNotSupported | The operation is not supported | REST Controller | HTTP 422 | Log the error & notify the requestor
5 | ResourceNotFound | The referred resource has not been found | REST Controller | HTTP 404 | Log the error & notify the requestor
6 | InternalError | There has been an error during the operation execution | REST Controller | HTTP 500 | Log the error & notify the requestor
7 | Method Not Allowed | There has been an error when a client invokes a wrong HTTP verb over a resource | REST Controller | HTTP 405 | Log the error & notify the requestor
+

Please note that the errors can also be categorized into the following categories for the different exceptions that can occur internally in the implementation logic:

+

 1. Low criticality: errors that should be handled by the software logic, are due to configuration issues, and should not require anything like a reset or a reboot of the system.

+

 2. Medium criticality: errors for which software handling is attempted, but which may need a system or chip reset and may interrupt the system significantly.

+

 3. High criticality: hardware-based errors that should not occur and, if they do occur, may need a system reset.

+

Fail-safe mechanisms for the different categories of errors:

+

 a. For low-criticality errors, logging and retries are performed, and the error is handled by means of rollback and by sending the failure to the upper layers.

+

 b. For high-criticality errors, emergency errors are logged, further recommending a reboot.

+

 c. For medium-criticality errors, logging and retry mechanisms are implemented; emergency logs are further written to the system and a reboot is recommended to the administrator.

+

During initialization, failures are logged as emergencies and an error is returned to the calling program.

+
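Mechanically, a REST layer can map such events to the status codes listed in the table above with a controller advice. The sketch below is illustrative only; it assumes Spring Web, and the exception classes are generic stand-ins rather than Scorpio's actual exception types:

    import org.springframework.http.HttpStatus;
    import org.springframework.http.ResponseEntity;
    import org.springframework.web.bind.annotation.ExceptionHandler;
    import org.springframework.web.bind.annotation.RestControllerAdvice;

    @RestControllerAdvice
    public class ErrorHandlerSketch {

        // e.g. InvalidRequest / BadRequestData -> HTTP 400
        @ExceptionHandler(IllegalArgumentException.class)
        public ResponseEntity<String> badRequest(IllegalArgumentException e) {
            return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(e.getMessage());
        }

        // e.g. AlreadyExists -> HTTP 409
        @ExceptionHandler(IllegalStateException.class)
        public ResponseEntity<String> alreadyExists(IllegalStateException e) {
            return ResponseEntity.status(HttpStatus.CONFLICT).body(e.getMessage());
        }

        // e.g. InternalError -> HTTP 500; the error is logged and the requestor notified.
        @ExceptionHandler(Exception.class)
        public ResponseEntity<String> internalError(Exception e) {
            return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(e.getMessage());
        }
    }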
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/genindex.html b/scorpio-broker/docs/en/source/_build/html/genindex.html new file mode 100644 index 0000000000000000000000000000000000000000..0b4aaf138628922a4004f1e3e6405c43d77b06b0 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/genindex.html @@ -0,0 +1,83 @@ + + + + + + + + + Index — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + +
+
+
+
+ + +

Index

+ +
+ +
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/gitGuideline.html b/scorpio-broker/docs/en/source/_build/html/gitGuideline.html new file mode 100644 index 0000000000000000000000000000000000000000..33f0202f49d5e0ff687309b5b37d8d40a8971bd8 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/gitGuideline.html @@ -0,0 +1,133 @@ + + + + + + + + Branch Management Guidelines — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Branch Management Guidelines¶

+
+_images/gitGuideline.jpg +
+

The community can have two main branches with an infinite lifetime:

+
    +
  1. Master branch: This is a highly stable branch that is always production-ready and contains the last release version of source code in production.
  2. +
  3. Development branch: Derived from the master branch, the development branch serves as a branch for integrating different features planned for an upcoming release. This branch may or may not be as stable as the master branch. It is where developers collaborate and merge feature branches. All of the changes should be merged back into the master somehow and then tagged with a release number.
  4. +
+

Apart from those two primary branches, there are other branches in the workflow:

+
    +
  • Feature Branch: Forked from the development branch for feature development i.e. enhancement or documentation. Merged back to the development branch after feature development or enhancement implementation.
  • +
  • Bug Branch: Branches off from the development branch. Merged back to the development branch after bug fixing.
  • +
  • Hotfix branch: Hotfix branches are created from the master branch. They are used when the current production release is running live and causing trouble due to a severe bug, while the changes on development are not yet stable. We then branch off a hotfix branch and start fixing the problem. This should be the rarest occasion, reserved for critical bugs only.
  • +
+

Note: Only NLE and NECTI members have the privilege to create and merge the Hotfix branch.

+

Branch | Branches naming guideline | Remarks
Feature branches | Must branch from: development. Must merge back into: development. Branch naming convention: feature-feature_id | feature_id is the Github issue id from https://github.com/ScorpioBroker/ScorpioBroker/issues
Bug Branches | Must branch from: development. Must merge back into: development. Branch naming convention: bug-bug_id | bug_id is the Github issue id from https://github.com/ScorpioBroker/ScorpioBroker/issues
Hotfix Branches | Must branch from: master branch. Must merge back into: master branch. Branch naming convention: hotfix-bug number. | Bug number is the Github issue id from https://github.com/ScorpioBroker/ScorpioBroker/issues

+
+

Permissions to the branches:¶

+
    +
  • Master - We are very strict: only NLE members and privileged members of NECTI can merge on the master branch and accept pull requests. Pull requests to master can be raised only by NECTI or NLE members.
  • +
  • Development - Any community member can raise a pull request to the development branch, but it should be reviewed by NLE or NECTI members. Development branch commits are moved to the master branch only when all the test cases written under the NGSI-LD test suites run successfully.
  • +
+
+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/hardwareRequirement.html b/scorpio-broker/docs/en/source/_build/html/hardwareRequirement.html new file mode 100644 index 0000000000000000000000000000000000000000..aafd6fdb19cbf146d706eccd8295caa3810ab0e3 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/hardwareRequirement.html @@ -0,0 +1,148 @@ + + + + + + + + 1. Java 8 System Requirements — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

1. Java 8 System Requirements¶

+

Windows

+
    +
  • Windows 10 (8u51 and above)
  • +
  • Windows 8.x (Desktop)
  • +
  • Windows 7 SP1
  • +
  • Windows Vista SP2
  • +
  • Windows Server 2008 R2 SP1 (64-bit)
  • +
  • Windows Server 2012 and 2012 R2 (64-bit)
  • +
  • RAM: 128 MB
  • +
  • Disk space: 124 MB for JRE; 2 MB for Java Update
  • +
  • Processor: Minimum Pentium 2 266 MHz processor
  • +
  • Browsers: Internet Explorer 9 and above, Firefox
  • +
+

Mac OS X

+
    +
  • Intel-based Mac running Mac OS X 10.8.3+, 10.9+
  • +
  • Administrator privileges for installation
  • +
  • 64-bit browser
  • +
  • A 64-bit browser (Safari, for example) is required to run Oracle Java on Mac.
  • +
+

Linux

+
    +
  • Oracle Linux 5.5+1
  • +
  • Oracle Linux 6.x (32-bit), 6.x (64-bit)2
  • +
  • Oracle Linux 7.x (64-bit)2 (8u20 and above)
  • +
  • Red Hat Enterprise Linux 5.5+1, 6.x (32-bit), 6.x (64-bit)2
  • +
  • Red Hat Enterprise Linux 7.x (64-bit)2 (8u20 and above)
  • +
  • Suse Linux Enterprise Server 10 SP2+, 11.x
  • +
  • Suse Linux Enterprise Server 12.x (64-bit)2 (8u31 and above)
  • +
  • Ubuntu Linux 12.04 LTS, 13.x
  • +
  • Ubuntu Linux 14.x (8u25 and above)
  • +
  • Ubuntu Linux 15.04 (8u45 and above)
  • +
  • Ubuntu Linux 15.10 (8u65 and above)
  • +
  • Browsers: Firefox
  • +
+
+
+

2. ZooKeeper Requirements¶

+

ZooKeeper runs in Java, release 1.6 or greater (JDK 6 or greater). +It runs as an ensemble of ZooKeeper servers. +Three ZooKeeper servers are the minimum recommended size for an ensemble, and we also recommend that they run on separate machines. +At Yahoo!, ZooKeeper is usually deployed on dedicated RHEL boxes, with dual-core processors, 2GB of RAM, and 80GB IDE hard drives.

+
+
+

3. Recommendations for Kafka¶

+

Kafka brokers use both the JVM heap and the OS page cache. The JVM heap is used for the replication of partitions between brokers and for log compaction. Replication requires 1MB (default replica.max.fetch.size) for each partition on the broker. In Apache Kafka 0.10.1 (Confluent Platform 3.1), we added a new configuration (replica.fetch.response.max.bytes) that limits the total RAM used for replication to 10MB, to avoid memory and garbage collection issues when the number of partitions on a broker is high. For log compaction, calculating the required memory is more complicated and we recommend referring to the Kafka documentation if you are using this feature. For small to medium-sized deployments, 4GB heap size is usually sufficient. In addition, it is highly recommended that consumers always read from memory, i.e. from data that was written to Kafka and is still stored in the OS page cache. The amount of memory this requires depends on the rate at which this data is written and how far behind you expect consumers to get. If you write 20GB per hour per broker and you allow brokers to fall 3 hours behind in normal scenario, you will want to reserve 60GB to the OS page cache. In cases where consumers are forced to read from disk, performance will drop significantly

+

Kafka Connect itself does not use much memory, but some connectors buffer data internally for efficiency. If you run multiple connectors that use buffering, you will want to increase the JVM heap size to 1GB or higher.

+

Consumers use at least 2MB per consumer and up to 64MB in cases of large responses from brokers (typical for bursty traffic). Producers will have a buffer of 64MB each. Start by allocating 1GB RAM and add 64MB for each producer and 16MB for each consumer planned.

+
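For example, under this rule of thumb a deployment with 5 producers and 10 consumers (illustrative figures) would plan roughly 1 GB + 5 × 64 MB + 10 × 16 MB ≈ 1.5 GB of RAM for the clients alone.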
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/index.html b/scorpio-broker/docs/en/source/_build/html/index.html new file mode 100644 index 0000000000000000000000000000000000000000..60861f939b85d7949f48bf08c8228150ad144e4b --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/index.html @@ -0,0 +1,130 @@ + + + + + + + + <no title> — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/introduction.html b/scorpio-broker/docs/en/source/_build/html/introduction.html new file mode 100644 index 0000000000000000000000000000000000000000..dfaafee25217bfa3708f115c019a363e5aba66bd --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/introduction.html @@ -0,0 +1,111 @@ + + + + + + + + 1. Introduction — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

1. Introduction¶

+

In today's era, people can't imagine their lives without the internet, and the same holds for our devices: nowadays most of our devices are integrated with the IoT, which gives us a plethora of advantages but also leaves us with a few complexities.
One of these is making these devices interact with each other, since each device makes use of different schemas; to mitigate this issue we have a one-stop solution.

+

Scorpio Broker is a Java-based pub-sub service designed and developed for the FIWARE platform, built on top of the Spring Boot architecture using NGSI-LD concepts.
Scorpio Broker allows you to collect, process, notify, and store IoT data with dynamic context through the use of linked data concepts.
It makes use of a microservice-based architecture, which has its own advantages over existing IoT brokers, such as scalability, cross-technology integration, etc.

+

Scorpio Broker, based on NGSI-LD, offers the unique feature of a linked-data context that provides a self-contained (or referenced) dynamic schema definition (i.e. the context) for the data contained in each message/entity.
This allows the Scorpio Broker core processing to remain unified even when it gets dynamic, context-driven data as its input from different types of data sources coupled with (or designed for) different schemas.

+

Key advantages of Scorpio Broker over other brokers:

+
    +
  • Uses micro-service architecture which enhances the performance drastically.
  • +
  • The Scorpio Broker architecture is designed & implemented as a scalable, highly available, and load balanced.
  • +
  • Use of Linked Data (LD), which gives us the leverage of dynamic context.
  • +
  • Usage of Kafka, allowing us the robust pub-sub service with the facility of scaling with no downtime.
  • +
  • It provides fail-over resiliency.
  • +
  • It provides load balancing to distribute the load on distributed infrastructure.
  • +
  • It is modular enough to offer low coupling and high cohesion by design.
  • +
  • It offers different storage integration without changing the application logic time and again.
  • +
+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/objects.inv b/scorpio-broker/docs/en/source/_build/html/objects.inv new file mode 100644 index 0000000000000000000000000000000000000000..3f2d86d283a0cfefaa467fd4301d3f4302d122ac Binary files /dev/null and b/scorpio-broker/docs/en/source/_build/html/objects.inv differ diff --git a/scorpio-broker/docs/en/source/_build/html/onepageTutorial.html b/scorpio-broker/docs/en/source/_build/html/onepageTutorial.html new file mode 100644 index 0000000000000000000000000000000000000000..9f89ef1a456e69bd6004dd49e04dbb9e87ed8182 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/onepageTutorial.html @@ -0,0 +1,296 @@ + + + + + + + + 1. Installation Guide — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

1. Installation Guide¶

+

In order to set up the environment of the Scorpio Broker, the following dependencies need to be configured:

+
    +
  1. Eclipse.
  2. +
  3. Server JRE.
  4. +
  5. ZooKeeper.
  6. +
  7. Apache Kafka.
  8. +
+
+
+

2. Windows¶

+
+

2.1. Eclipse installation¶

+
    +
  • Download the Eclipse Installer.:
  • +
+

 Download the Eclipse Installer from http://www.eclipse.org/downloads. Eclipse is hosted on many mirrors around the world. Please select the one closest to you and start downloading the Installer.

+
    +
  • Start the Eclipse Installer executable:
  • +
+

 For Windows users, after the Eclipse Installer executable has finished downloading, it should be available in your download directory. Start the Eclipse Installer executable. You may get a security warning to run this file. If the Eclipse Foundation is the Publisher, you are good to select Run.

+

 For Mac and Linux users, you will still need to unzip the download to create the Installer. Start the Installer once it is available.

+
    +
  • Select the package to install:
  • +
+

 The new Eclipse Installer shows the packages available to Eclipse users. You can search for the package you want to install or scroll through the list. Select and click on the package you want to install.

+
    +
  • Select your installation folder
  • +
+

 Specify the folder where you want Eclipse to be installed. The default folder will be in your User directory. Select the 'Install' button to begin the installation.

+
    +
  • Launch Eclipse
  • +
+

 Once the installation is complete you can now launch Eclipse. The Eclipse Installer has done its work. Happy coding.

+
+
+

2.2. JRE Setup¶

+
    +
  • Start the JRE installation and tick the "Change destination folder" checkbox, then click 'Install.'
  • +
+
+_images/jre-1.png +
+
    +
  • Change the installation directory to any path without spaces in the folder name, e.g. C:\Java\jre1.8.0_xx (by default it will be C:\Program Files\Java\jre1.8.0_xx), then click 'Next.'
  • +
+

After you've installed Java in Windows, you must set the JAVA_HOME environment variable to point to the Java installation directory.

+

Set the JAVA_HOME Variable

+

To set the JAVA_HOME variable:

+
    +
  1. Find out where Java is installed. If you didn't change the path during installation, it will be something like this:
  2. +
+
+
C:\Program Files\Java\jdk1.8.0_65
+
    +
    • +
    • In Windows 7 right-click My Computer and select Properties > Advanced.
    • +
    +
  1. +
+
+
+
OR
+
    +
  • In Windows 8 go to Control Panel > System > Advanced System Settings.
  • +
+
+
    +
  1. Click the Environment Variables button.
  2. +
  3. Under System Variables, click New.
  4. +
  5. In the User Variable Name field, enter: JAVA_HOME
  6. +
  7. In the User Variable Value field, enter your JDK path.
  8. +
+
+_images/jre-2.png +
+

 (Java path and version may change according to the version of Kafka you are using)

+
    +
  1. Now click OK.
  2. +
  3. Search for the Path variable in the "System Variables" section of the "Environment Variables" dialogue box you just opened.
  4. +
  5. Edit the path and type ;%JAVA_HOME%\bin at the end of the text already written there, just like the image below:
  6. +
+
+_images/jre-3.png +
+
    +
  • To confirm the Java installation, just open cmd and type "java -version". You should be able to see the version of Java you just installed.
  • +
+
+_images/jre-4.png +
+

If your command prompt somewhat looks like the image above, you are good to go. Otherwise, you need to recheck whether your setup version matches the correct OS architecture (x86, x64), or if the environment variables path is correct.

+
+
+

2.3. ZooKeeper Installation¶

+
    +
  1. Go to your ZooKeeper config directory. For me it's C:\zookeeper-3.4.7\conf.
  2. +
  3. Rename the file "zoo_sample.cfg" to "zoo.cfg".
  4. +
  5. Open zoo.cfg in any text editor, like Notepad; I prefer Notepad++.
  6. +
  7. Find and edit dataDir=/tmp/zookeeper to :\zookeeper-3.4.7\data
  8. +
  9. Add an entry in the System Environment Variables as we did for Java.
  10. +
+
+
    +
  1. Add ZOOKEEPER_HOME = C:\zookeeper-3.4.7 to the System Variables.
  2. +
  3. Edit the System Variable named "Path" and add ;%ZOOKEEPER_HOME%\bin;
  4. +
+
+
    +
  1. You can change the default Zookeeper port in zoo.cfg file (Default port 2181).
  2. +
  3. Run ZooKeeper by opening a new cmd and type zkserver.
  4. +
  5. You will see the command prompt with some details, like the image below:
  6. +
+
+_images/zookee.png +
+
+
+

2.4. Setting Up Kafka¶

+
    +
  1. Go to your Kafka config directory. For example: C:\kafka_2.11-0.9.0.0\config
  2. +
  3. Edit the file "server.properties".
  4. +
  5. Find and edit the line "log.dirs=/tmp/kafka-logs" to "log.dirs=C:\kafka_2.11-0.9.0.0\kafka-logs".
  6. +
  7. If your ZooKeeper is running on some other machine or cluster, you can edit "zookeeper.connect:2181" to your custom IP and port. For this demo, we are using the same machine, so there's no need to change it. The Kafka port and broker.id are also configurable in this file. Leave the other settings as they are.
  8. +
  9. Your Kafka will run on the default port 9092 and connect to ZooKeeper's default port, 2181.
  10. +
+

Note: To run Kafka, ZooKeeper must be running first. At shutdown, close Kafka and ZooKeeper in the proper order (Kafka first, then ZooKeeper).

+
+
+

2.5. Running a Kafka Server¶

+

Important: Please ensure that your ZooKeeper instance is up and running before starting a Kafka server.

+
    +
  1. Go to your Kafka installation directory: C:\kafka_2.11-0.9.0.0
  2. +
  3. Open a command prompt here by pressing Shift + right-click and choosing the "Open command window here" option.
  4. +
  5. Now type .\bin\windows\kafka-server-start.bat .\config\server.properties and press Enter.
  6. +
+

 .\bin\windows\kafka-server-start.bat .\config\server.properties

+
+
+

2.6. Setting up PostgreSQL¶

+

Step 1) Go to https://www.postgresql.org/download and select your O.S.; it's Windows for me.

+

Step 2) You are given two options:-

+

 1. Interactive Installer by EnterpriseDB
 2. Graphical Installer by BigSQL

+

BigSQL currently installs pgAdmin version 3, which is deprecated. It's best to choose EnterpriseDB, which installs the latest version 4.

+

Step 3)

+

 1. You will be prompted for the desired PostgreSQL version and operating system. Select Postgres 10, as Scorpio has been tested and developed with this version.

+

 2. Click the Download Button, Download will begin

+

Step 4) Open the downloaded .exe and Click next on the install welcome screen.

+

Step 5)

+

 1. Change the Installation directory if required, else leave it to default

+

 2.Click Next

+

Step 6)

+

 1. You can choose the components you want to install in your system. You may uncheck Stack Builder

+

 2. Click on Next

+

Step 7)

+

 1. You can change the data location

+

 2.Click Next

+

Step 8)

+

 1. Enter the superuser password. Make a note of it

+

 2.Click Next

+

Step 9)

+

 1. Leave the port number as the default

+

 2.Click Next

+

Step 10)

+

 1. Check the pre-installation summary.

+

 2.Click Next

+

Step 11) Click the next button

+

Step 12) Once install is complete you will see the Stack Builder prompt

+

 1. Uncheck that option. We will use Stack Builder in more advance tutorials

+

 2.Click Finish

+

Step 13) To launch Postgre go to Start Menu and search pgAdmin 4

+

Step 14) You will see pgAdmin homepage

+

Step 15) Click on Servers > Postgre SQL 10 in the left tree

+
+_images/dbconfig-1.png +
+

Step 16)

+

 1.Enter superuser password set during installation

+

 2. Click OK

+

Step 17) You will see the Dashboard

+
+_images/dbconfig-2.png +
+

That's it for the PostgreSQL installation.

+
+
+
+

3. Linux¶

+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/search.html b/scorpio-broker/docs/en/source/_build/html/search.html new file mode 100644 index 0000000000000000000000000000000000000000..d43b07fdf4cfa5c6b22e2d27387778ee07b29141 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/search.html @@ -0,0 +1,94 @@ + + + + + + + + Search — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Search

+
+ +

+ Please activate JavaScript to enable the search + functionality. +

+
+

+ From here you can search these documents. Enter your search + words into the box below and click "search". Note that the search + function will automatically search for all of the words. Pages + containing fewer words won't appear in the result list. +

+
+ + + +
+ +
+ +
+ +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/searchindex.js b/scorpio-broker/docs/en/source/_build/html/searchindex.js new file mode 100644 index 0000000000000000000000000000000000000000..098a2088d696c224b567298145f09734f3d28086 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/searchindex.js @@ -0,0 +1 @@ +Search.setIndex({docnames:["API_walkthrough","callFlow","contributionGuideline","docker","errorHandling","hardwareRequirement","index","introduction","onepageTutorial","security","systemOverview","troubleshooting"],envversion:53,filenames:["API_walkthrough.rst","callFlow.rst","contributionGuideline.rst","docker.rst","errorHandling.rst","hardwareRequirement.rst","index.rst","introduction.rst","onepageTutorial.rst","security.rst","systemOverview.rst","troubleshooting.rst"],objects:{},objnames:{},objtypes:{},terms:{"0_65":8,"0_xx":8,"0config":8,"0kafka":8,"10gb":[],"10mb":5,"16mb":5,"1gb":5,"1mb":5,"20gb":5,"2gb":5,"2mb":5,"4gb":5,"60gb":5,"64mb":5,"7conf":8,"7data":8,"80gb":5,"8u20":5,"8u25":5,"8u31":5,"8u45":5,"8u51":5,"8u65":5,"byte":5,"case":[1,2,5,9,10],"default":[5,8],"final":11,"function":[1,6,10],"import":8,"new":[1,5,8,9],"public":[],"return":4,"short":[1,9,10],"static":10,"super":[],"var":3,And:1,Bus:10,But:2,Doing:1,For:[1,3,4,5,8,10],IDE:5,LTS:5,Not:4,One:7,SAS:[],THE:10,ThE:10,That:[8,10],The:[1,2,3,4,5,7,8,9,10],Then:1,There:4,These:10,Use:7,Uses:7,With:[],aaio:3,abl:[1,8,10],about:10,abov:[5,8],accept:2,access:[9,10],accord:8,accordingli:1,account:9,achiev:1,act:9,action:4,actual:[1,10],adapt:[],add:[0,5,8,11],added:5,addit:5,addition:10,address:10,administr:[4,5],advanc:[6,8],advantag:7,after:[2,8],afterword:1,again:[7,9],against:[1,9],algorithm:1,all:[1,2,3,10],alloc:5,allow:[4,5,7,10],almost:3,along:1,alreadi:[0,1,4,8,9],alreadyexist:4,also:[1,4,5,8,9,10],alter:[],alwai:[2,5],ambigu:1,among:[1,10],amount:5,ani:[1,2,8,9,10],anyth:4,apach:[5,8],apart:2,api:[1,6,9,10,11],append:6,applic:[1,7,9,10],approach:1,architectur:[6,7,8],around:8,arrai:1,artifactid:11,associ:[1,4],atcontext:1,atcontextserv:3,attr:0,attribut:[0,1],attrid:0,auth:9,auth_cod:9,authent:9,author:9,avail:[7,8,10],avoid:[1,5],back:[1,2,9,10],badrequestdata:4,balanc:7,base:[1,4,5,7,9,10],basic:1,bat:8,been:[4,8,9,10,11],befor:[8,11],begin:8,beginn:6,behalf:9,behind:5,being:[1,9],below:[4,8,11],best:8,better:[],between:5,bigdata:10,bigsql:8,bin:8,bind:11,binwindowskafka:8,bit:5,boot:7,both:[1,5],box:[5,8],branch:6,brief:[],broker:[0,1,3,4,5,7,8,9,10],browser:[5,9],buffer:5,bug:2,bug_id:2,build:[1,7],build_dir_ac:3,build_dir_emg:3,build_dir_hmg:3,build_dir_qmg:3,build_dir_rmg:3,build_dir_s:3,build_dir_sc:3,build_dir_sgw:3,build_dir_strmg:3,build_dir_submg:3,builder:8,bursti:5,bus:[3,10],button:8,bypass:9,cach:5,calcul:5,call:[1,3,4],can:[1,2,3,4,7,8,9,10],cannot:1,care:10,categor:4,categori:4,caus:2,certain:[1,3,10],cfg:8,chang:[1,2,3,7,8,10],check:[1,8,9,10],checkbox:8,chip:4,choic:1,choos:8,clean:3,click:8,client:[4,9,10],close:8,closest:8,cloud:10,cluster:[3,8],cmd:8,code:[2,4,8,9],cohes:7,collabor:2,collect:[5,7],com:[2,3,11],combin:10,come:9,command:8,commit:2,common:1,commun:[2,10],compact:5,complet:[8,11],complex:[1,7,10],compliant:[1,9],complic:5,compon:[3,8,9,10],comput:[8,10],concept:7,concern:10,condit:1,conext:1,conf:[],config:[3,8],configserv:8,configur:[4,5,8,9],confirm:8,confluent:5,connect:[5,8,10],connector:[5,10],conserv:[],consum:[1,5,10],contain:[1,2,6,7],content:4,context:[0,6,7,9,10],continu:1,contrib:[],control:[4,8],convent:[]
,core:[3,5,7,11],correct:8,correspond:3,could:10,coupl:7,cover:10,creat:[0,2,6,8],creation:1,credenti:9,criteria:9,critic:[2,4],cross:[7,10],crqueri:1,crqueryresult:1,crud:10,csour:1,csourc:[0,1,10],csourcesub:1,csourcesubscript:0,current:[1,2,3,8],custom:8,dashboard:8,data:[1,4,5,7,8,10],databas:[1,3,10],datadir:8,decid:9,decoupl:[1,10],dedic:5,definit:7,delet:[0,10],delta:1,demo:8,deni:9,depend:[5,8],deploi:[5,10],deploy:[5,6],deprec:[8,11],deregist:10,deriv:2,descript:[0,4],design:[1,7],desir:8,desired_throughput:[],desktop:5,destin:8,detail:[0,3,8,9],determin:10,develop:[2,7,8],devic:[0,7],dialogu:8,did:8,didn:8,differ:[1,2,4,7,10],dir:[3,8],direct:1,directli:1,directori:8,disallow:3,discov:[1,10],discoveri:[1,10],disk:5,dist:3,distribut:[7,10],docker:6,document:[2,5,10],doe:[4,5,9,10],domain:10,done:[1,8],down:[],download:8,downtim:7,drastic:7,drive:5,driven:7,drop:5,dskiptest:3,dual:5,due:[2,4],dure:[4,8],dynam:[7,10],each:[1,3,5,7,10],easi:[],easier:10,easili:10,eclips:[],edit:8,editor:8,effici:5,eight:[],either:[],element:[1,4],els:[1,8],embed:1,emerg:4,enabl:9,encrypt:[],end:[1,8,9,10],endpoint:1,enhac:[],enhanc:[2,7],enough:7,ensembl:5,ensur:8,enter:[1,8],enterpris:5,enterprisedb:8,entir:9,entiti:[0,6,7,10],entity_cr:1,entityid:[0,1],entitymanag:3,entri:[1,8],environ:[3,8,10],era:7,error:[1,6,9],especi:10,estim:[],etc:[1,7,10],eureka:[3,11],eva:1,even:7,event:4,everi:10,exampl:[5,8,10],except:[1,3,4,11],exchang:10,exe:8,execut:[1,4,8,10],exist:[0,1,4,7,9],exit:1,expand:10,expect:5,explicitli:1,explor:5,expos:[1,10],express:1,extend:10,extens:[],extern:[0,10],extract:9,face:11,facil:7,fail:[4,7],failov:[],failur:4,fall:5,far:5,faster:[],fault:10,favor:1,featur:[2,5,7,9],feature_id:2,feder:10,fetch:[1,5,9,10],few:7,field:[1,8],figur:1,file:[3,8],filesjavajdk1:8,filesjavajre1:8,fill:1,find:[8,9],finish:8,firefox:5,first:[1,3,8],fiwar:7,fix:[2,11],flexibl:1,flow:[1,6,10],folder:8,follow:[1,3,4,8],forc:5,fork:2,form:[1,9],format:10,formula:[],forward:[1,9],found:4,foundat:8,framework:10,from:[1,2,3,5,7,8,9,10,11],further:[1,3,4,10],futur:1,garbag:5,gatewai:[1,3,9,10],gather:1,gatwai:[],geo:[1,10],get:[0,1,5,6,7,8,10,11],gige:[],gisdb:[],github:2,give:7,given:[1,8,9],good:8,grant:[],graphic:8,greater:5,groupid:11,guid:6,guidelin:6,handl:10,handler:[6,10],happi:8,hard:[1,5],hardwar:4,has:[1,3,4,7,8,9,11],hat:5,have:[1,2,3,5,7,10],header:[],health:10,heap:5,help:[1,10],here:8,high:[4,5,7,10],higher:5,highli:[2,5,7,10],him:[],histori:[3,6],historymanag:3,hit:8,homepag:8,host:[3,8],hotfix:2,hour:5,how:5,howev:[1,3],http:[0,1,2,3,4,8,10],hub:3,huge:10,ideal:[],ident:9,imag:8,imagin:7,impl:11,implement:[2,4,7,10],includ:[4,9],incom:[1,9,10],increas:5,independ:[1,10],individu:3,infinit:2,info:[4,9],inform:[1,10],infrastructur:[7,10],initi:4,input:[4,7],insid:1,instal:[5,6],instanc:[1,8,10],instead:1,integr:[2,7],intel:5,intend:3,inter:10,interact:[7,8],intercommun:10,interfac:[0,1,9,10],intern:[1,4,5,10],internalerror:4,internet:[5,7],interpret:1,interrupt:4,introduc:9,introduct:6,invalid:4,invalidrequest:4,invok:[4,10],involv:[1,4],iot:[1,7,10],issu:[2,4,5,7,9,11],item:1,its:[0,7,8,9,10],itself:5,jar:3,jar_file_build_ac:3,jar_file_build_emg:3,jar_file_build_hmg:3,jar_file_build_qmg:3,jar_file_build_rmg:3,jar_file_build_s:3,jar_file_build_sc:3,jar_file_build_sgw:3,jar_file_build_strmg:3,jar_file_build_submg:3,jar_file_run_ac:3,jar_file_run_emg:3,jar_file_run_hmg:3,jar_file_run_qmg:3,jar_file_run_rmg:3,jar_file_run_s:3,jar_file_run_sc:3,jar_file_run_sgw:3,jar_file_run_strmg:3,jar_file_run_submg
:3,java:[6,7,8,11],java_hom:8,javajre1:8,javax:11,jaxbcontext:11,jdk:[5,8],jre:5,json:10,jump:1,just:8,jvm:5,kafka:[1,6,7,10],kafka_2:8,kafka_advertised_host_nam:3,keep:1,kei:7,know:[1,9,10],lag:[],lang:11,larg:5,last:2,latest:8,launch:8,layer:[4,9],ldcontext:1,learn:10,least:5,leav:8,left:[7,8],level:[1,10],leverag:[7,10],lies:[],life:10,lifetim:2,like:[1,3,4,8,11],limit:[1,5],line:8,link:7,linux:[5,6],list:[1,3,4,8],listen:1,live:[2,7],load:7,local:[9,10],localhost:[],locat:[1,8,10],log:[4,5,8,9],logic:[4,7],login:9,look:8,lot:10,low:[4,7],mac:[5,8],machin:[5,8,11],mai:[1,2,4,8,10,11],main:2,maintain:1,make:[7,8,10],manag:[1,6,9,10],mani:8,manual:11,map:[1,10],mark:[1,11],master:2,match:[3,8,9],maven:11,max:5,md5:[],mean:4,mechan:[1,4,10],medium:[4,5],meet:4,member:2,memori:5,mention:1,menu:8,merg:2,messag:[1,3,7,9,10],metadata:1,method:[4,10],methodolog:10,mhz:5,micro:[1,7,10],microservic:[3,7],might:[1,10],minimum:5,mirror:8,mitig:7,mode:[3,10],model:[1,10],modifi:1,modul:[0,4,10],modular:7,monitor:10,more:[5,8],most:7,mostli:1,move:[1,2],much:5,multipl:[5,10],must:[1,2,8],mvn:3,mysql:10,name:[0,1,8,10],necessari:1,necti:2,need:[1,3,4,8,10,11],next:8,ngb:[],ngsi:[0,1,2,7,10],ngsi_ld:1,nic:[],nle:2,node:[],non:1,normal:5,note:[1,2,4,8],notepad:8,notif:[1,10],notifi:[1,4,7],now:[1,8,9],nowadai:7,num_brok:[],number:[2,5,8,10],oauth2:9,object:1,occas:2,occur:4,off:2,offer:[1,7],often:10,onc:[1,8,10],one:[1,3,7,8,10],onli:[1,2],open:8,oper:[1,4,8,9,10],operationnotsupport:4,option:[1,8],oracl:5,order:[8,10,11],org:8,origin:9,other:[1,2,7,8,10],otherwis:8,our:[7,9],out:[8,9],over:[1,4,7,10],overview:[],own:7,packag:[3,8,11],page:5,panel:8,parallel:1,partial:0,particular:0,partit:5,password:8,patch:[0,10],path:8,payload:1,pdocker:3,peer:[],pentium:5,peopl:7,per:[1,5,10],perform:[1,4,5,7,10],period:1,permiss:9,persist:10,pg_hba:[],pgadmin:8,physic:[],plan:[2,5],platform:[5,7],pleas:[3,4,8],plethora:7,point:[1,8],pom:11,port:[0,8,10],post:[0,1,10],postgi:10,postgr:[3,8,10],postgresql10data:[],postgresql:10,pre:8,prefer:8,prepar:1,present:[1,9,10,11],press:8,previous:1,primari:2,privileg:[2,5,9],problem:2,process:[1,7,10],processor:5,produc:[5,10],product:2,profil:3,program:[4,8],project:3,prompt:8,properti:8,protect:9,provid:[1,3,4,7,9,10],proxi:10,pub:[1,7,10],publish:[1,8],pull:[1,2],purpos:[9,10],push:1,put:1,queri:[6,10],querymanag:3,queryresult:1,queue:[1,10],rais:[1,2],ram:5,ramifi:2,rarest:2,rate:5,rather:1,reach:10,read:[5,9],readi:2,real:10,realiti:10,realm:9,reason:9,reboot:4,receiv:[1,9],recheck:8,recogn:9,recognis:[],recommend:[4,6],record:1,red:5,redirect:9,reduc:[],refer:[3,4,5],referenc:7,regist:[0,1,10],registr:[0,6,10],registrationid:0,registri:[1,3,10],registrymanag:3,regular:1,rel:1,relai:9,relat:[1,10],relationship:10,releas:[1,2,5],relev:10,remain:[1,7],remark:2,remot:[1,10],remov:11,renam:8,render:9,repeat:1,replic:5,replica:5,request:[1,2,4,6,10],requestor:[1,4],requir:[1,3,4,6,8,10],reserv:5,reset:4,resgistr:0,resil:[],resili:[7,10],resolv:[1,10],resourc:[4,9],resourcenotfound:4,respond:[1,9],respons:[1,4,5,9,10],rest:[1,4,9,10],restart:[],result:1,retri:4,retriev:[0,1],reus:9,revert:9,review:2,rhel:5,right:8,robust:7,role:9,rollback:4,root:[1,3],rout:[9,10],row:[],run:[2,5,10,11],safari:5,safe:4,same:[1,7,8,9],save:[1,10],scalabl:[7,10],scale:[7,10],scenario:[4,5],schema:[7,10],scorpio:[0,1,4,7,8,9,10],scorpiobrok:[2,11],screen:8,scroll:8,search:8,search_path:[],sec:[],second:3,section:[1,4,8,10],secur:[6,8,10],see:8,select:8,self:7,send:[1,4,9,10],sent:1,separ:[1,3,5],s
erv:[1,2,10],server:[3,5,11],servic:[1,7,9,10],session:9,set:1,setup:[],sever:2,shift:8,should:[2,4,8],show:[1,3,8],shutdown:10,significantli:[4,5],simpl:10,simpler:1,simplif:1,simplifi:1,sinc:9,singl:[1,3,10],situat:1,size:5,small:5,softwar:4,solut:7,solv:10,some:[1,4,5,8,9],somehow:2,someth:8,somewhat:8,sourc:[0,2,6,7,10],sp1:5,sp2:5,space:[5,8],spec:[1,9],specif:[1,10],specifi:8,speed:[],spring:[7,10],springcloudmodul:[3,11],sql:8,ssd:[],stabl:2,stack:[8,10],standalon:10,start:[1,2,5,8,11],startup:10,step:[1,8],still:[5,7,8],stop:7,storag:[1,3,7,10],storagemanag:3,store:[1,5,7,10],stream:10,strict:2,structur:[],sub:[7,10],submit:9,subscrib:[1,10],subscript:[0,6,10],subscriptionid:0,subscriptionmanag:3,success:[1,9],successfulli:2,sudo:[],suffici:5,suit:2,summari:8,sun:11,superus:8,support:[1,3,4,10],suse:5,syntact:4,system:[1,4,6,8,9,10],tabl:[1,10],tag:2,take:[1,10],technolog:[7,10],temporalent:1,tend:2,test:[2,8],text:8,than:[8,9],thei:[5,10],them:10,therefor:1,thi:[1,2,3,4,5,7,8,9,10,11],those:[1,2,4,10],thread:1,three:5,through:[1,8,10],thrown:9,thu:[7,10],time:[1,7,8,9,10],tmp:8,to128:[],todai:7,token:9,toler:10,top:7,topic:[1,10],total:5,traffic:5,transfer:[],transform:10,tree:8,tri:4,trigger:[1,3],troubl:2,troubleshoot:6,tutori:8,two:[2,3,8],type:[1,3,7,8,10,11],typenotpresentexcept:11,typic:5,ubuntu:5,uncheck:8,under:[1,2,8,10],unifi:[7,10],uniqu:7,unit:10,unstabl:2,unzip:8,upcom:2,updat:[0,5,6,10],upon:[1,9],upper:4,url:[9,10],usag:7,use:[3,5,7,8,10],used:[1,5,10],user:[1,6,8,9,10],uses:[9,10],using:[3,5,7,8,9,10],usual:[5,10],valid:[1,9],valu:[1,3,8],variabl:[3,8],verb:[0,4],veri:[1,2,11],verifi:10,version:[2,3,8,11],via:10,view:[],vim:[],visibl:10,vista:5,wai:10,walk:1,walkthrough:6,want:[3,5,8,10],warn:8,web:10,welcom:8,well:[1,4,7],when:[1,2,4,5,9,10,11],where:[1,2,5,7,8,9,10],whether:[8,9],which:[0,1,4,5,7,8,9,10],who:1,whole:1,whom:1,why:[1,9],width:[],window:[5,6],without:[1,7,8],work:8,workflow:2,world:[8,10],would:[1,10],write:[1,5],written:[2,5,8],wrong:4,wurstmeist:3,www:8,x64:8,x86:8,xml:11,yahoo:5,yes:9,yet:2,yml:3,you:[3,5,7,8,11],your:[3,8,11],zip:[],zkserver:8,zoo:8,zoo_sampl:8,zookeep:6,zookeeper_hom:8},titles:["10. API Walkthrough","3. Entity Create/Update/Append","9. Branch Management Guidelines","11. Getting a docker container","4. Error Handler","1. Java 8 System Requirements","<no title>","1. Introduction","1. Installation Guide","5. Security Architecture","1. Architecture","12. Troubleshooting"],titleterms:{"function":9,api:0,append:1,architectur:[9,10],beginn:[],branch:2,build:3,compos:3,configur:[],contain:3,context:1,convent:2,creat:1,depend:11,deploy:10,docker:3,eclips:8,entiti:1,error:4,first:[],flow:9,gener:3,get:3,guid:8,guidelin:2,handl:4,handler:4,histori:1,imag:3,instal:8,introduct:7,java:5,jaxb:11,jdk:[],jre:8,kafka:[3,5,8],linux:8,list:0,manag:2,maven:3,miss:11,name:2,outsid:3,overview:[],permiss:2,postgresql:8,queri:1,recommend:5,registr:1,remark:3,request:9,requir:5,run:[3,8],secur:9,server:8,set:8,setup:8,sourc:1,subscript:1,system:5,time:[],troubleshoot:11,updat:1,walkthrough:0,window:8,zookeep:[5,8]}}) \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/security.html b/scorpio-broker/docs/en/source/_build/html/security.html new file mode 100644 index 0000000000000000000000000000000000000000..1f68a370c96aa9b59cdfe8dfadece67cfa296946 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/security.html @@ -0,0 +1,124 @@ + + + + + + + + 5. 
Security Architecture — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

5. Security Architecture¶

+

The Scorpio Broker system will also be responsible for identity & authentication management security. This includes authentication & authorization of requests, users, and role-based protected resources to be accessed in the Scorpio Broker security realm.

+

A new Authentication & Authorization service compliant with the OAuth 2.0 specification has been introduced; it provides application-layer security for all Scorpio Broker components & services.

+
+_images/security.png +
+
+
+

6. Security - Functional Request Flow¶

+
    +
  1. The browser/end user sends a request for a protected resource to the Scorpio Broker system through the API gateway REST interface.
  2. The API gateway checks whether the security feature is enabled.
+

 a. If yes, it checks whether the request is already authenticated and already has an existing session.

+

 - If it does not find any session, it forwards the request to the Authentication & Authorization service.

+

 - If it finds an existing session, it reuses that session for authentication and routes the request to the back-end resource service.

+

 b. If security is not enabled, it bypasses the security check and routes the request to the back-end resource service, which is responsible for rendering the resource for the given request.

+
    +
  3. When the request reaches the Authentication & Authorization (Auth, in short) service, the service responds to the original requester, i.e. the user/browser, with a login form so that the user can present the identity credentials issued to access the resource.
  4. The user submits the login form with these credentials to the Auth service. The Auth service validates the credentials against its account details and responds with a successful-login auth code together with the redirect URL to which the user can redirect in order to fetch the requested resource.
  5. The user/browser now redirects to the redirect URL, which in our case is again the API gateway URL, passing the auth_code received from the Auth service.
  6. The API gateway again checks the session and finds the existing session context, but this time the request carries the auth_code, so the gateway uses that code to request a token from the Auth service, acting as a client on the user's behalf. Based on the auth code, the Auth service recognizes the already logged-in, validated user and returns an access token to the API gateway (a minimal sketch of this token exchange is given after this list).
  7. Upon receiving the token (within the same security session context), the API gateway relays/routes the request to the back-end resource service for the originally requested resource/operation.
  8. The back-end resource service is also enabled with security features (if not, an error will be thrown for the incoming secure request). It receives the request, reads the security context out of it, and validates it (based on some extracted info) with the Auth service to check whether this is a valid token/request with the given privileges. The Auth service responds, and the back-end service then decides whether the local security configuration and the Auth-service-based access permissions match.
  9. If the access permissions/privileges match for the incoming request, the service responds with the requested resources to the user/browser. If the request does not match the security criteria, it responds with an error message and the reason why access is denied.
+
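To make step 6 more concrete, the following is a minimal, illustrative sketch (not taken from the Scorpio Broker source) of how a gateway could exchange the received auth_code for an access token against a standard OAuth 2.0 token endpoint. The endpoint URL, client id/secret and redirect URI are placeholder assumptions:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class TokenExchangeSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical values: the real endpoint and client credentials depend on the deployed Auth service.
            String form = "grant_type=authorization_code"
                    + "&code=AUTH_CODE_FROM_REDIRECT"
                    + "&redirect_uri=http://localhost:9090/login/callback"
                    + "&client_id=scorpio-gateway"
                    + "&client_secret=CHANGE_ME";

            HttpRequest tokenRequest = HttpRequest.newBuilder(URI.create("http://auth-server:9191/oauth/token"))
                    .header("Content-Type", "application/x-www-form-urlencoded")
                    .POST(HttpRequest.BodyPublishers.ofString(form))
                    .build();

            // The JSON response carries the access token that the gateway attaches to the
            // onward request towards the back-end resource service.
            HttpResponse<String> tokenResponse = HttpClient.newHttpClient()
                    .send(tokenRequest, HttpResponse.BodyHandlers.ofString());
            System.out.println(tokenResponse.body());
        }
    }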
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/systemOverview.html b/scorpio-broker/docs/en/source/_build/html/systemOverview.html new file mode 100644 index 0000000000000000000000000000000000000000..712e17baef2c91348dc305918cdcc9044a5688ad --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/systemOverview.html @@ -0,0 +1,132 @@ + + + + + + + + 1. Architecture — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

1. Architecture¶

+

The deployment architecture leverages the Spring Cloud framework, which addresses many micro-service concerns (e.g. scaling, monitoring, fault tolerance, high availability, security, decoupling, etc.), and a Kafka-based distributed and scalable message queue infrastructure to provide high-performance message processing for the huge number of context requests that is usual in the IoT domain.

+

It covers the high-level operational (HTTP-based REST with the methods POST/GET/DELETE/PATCH) request flow from the external world to the Scorpio Broker system. The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide. In reality, each of the Scorpio Broker services has been implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment. The API gateway routes all incoming requests to the specific micro-services with the help of the registration & discovery service. Once the request reaches a micro-service, it uses (pub/sub) Kafka topics (message queues), based on the operation requirement, for real-time storage and for providing intercommunication among the different micro-services over message queues; a minimal sketch of such a publish step is given below.
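The sketch below is illustrative only (an assumption, not Scorpio Broker source code): it shows how a micro-service could publish an NGSI-LD entity payload to a Kafka topic so that other micro-services can consume it. The topic name and the payload are placeholders:

    import java.util.Properties;

    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class EntityPublishSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "kafka:9092"); // matches the default docker setup
            props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
            props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

            try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
                String entityId = "urn:ngsi-ld:Vehicle:A100"; // hypothetical entity
                String payload = "{\"id\":\"" + entityId + "\",\"type\":\"Vehicle\"}";
                // Keying by entity id keeps all changes of one entity in the same partition.
                producer.send(new ProducerRecord<>("ENTITY", entityId, payload));
            }
        }
    }

Other micro-services (for example a storage component) would subscribe to the same topic and react to the published change.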

+
+_images/architecture.png +
+
    +
  • Application: End-user/domain applications leverage the Scorpio Broker to provide the required information about the IoT infrastructure. These applications can query, subscribe to, and update context information to/from the Scorpio Broker as per their requirements.
  • +
  • Consumers: These are the IoT entities or applications that consume the data of Scorpio Broker.
  • +
  • Producers: These are the IoT entities, context source, or applications that produce the context data to the Scorpio Broker.
  • +
  • Service API Gateway: This is the proxy gateway for the external world to access the internal services of the Scorpio Broker system, exposed via REST-based HTTP interfaces. All internal Scorpio Broker services can be accessed through this service gateway using its single IP & port (which are usually static) and extending the service name in the URL. Thus the user does not need to take care of (or learn, or use) the IP and port of every service, which often change dynamically. This makes life easier, especially when multiple services (or micro-services) are running on one system; the proxy gateway (i.e. the service API gateway) solves this for all the back-end services.
  • +
  • Rest Interface: These are the HTTP based interfaces for the external entities/applications to consume in order to execute certain operations on Scorpio Broker. The external interface would be visible through the Service API gateway and internal interface mapping to each requested service would be discovered through the service registration & discovery module.
  • +
  • Service Discovery & Registration: This component allows registration of any service (web service/micro-service) with it so that any client using the discovery functionality of this component can determine the location of a service instance to which it wants to send requests. In short, a service registry & discovery implements a database of services, their instances, and their locations. Service instances get registered with the service registry on startup and deregistered on shutdown. A client of the service queries the service registry to discover the available instances of a service. A service registry might also invoke a service instance's health check API to verify that it is able to handle requests (a minimal registration sketch is given after this list).
  • +
  • Entity Manager: This component handles all entity related CRUD operations with the help of other components of the Scorpio Broker.
  • +
  • LD Context Resolver: This component is responsible for expanding the NGSI-LD document based on the JSON-LD @context for further processing by the other components of the Scorpio Broker.
  • +
  • Subscription & Notification Manager: This component is responsible for handling CRUD operations related to entities and/or csource subscription & notification.
  • +
  • Query Manager: This component handles simple or complex queries (e.g. geo-query) to the Scorpio Broker.
  • +
  • Storage Manager: This component is responsible for fetching data from the message broker and then transforming them into relevant schema format in order to persist in DB tables. Additionally, this manager also provides interfaces for complex queries to the DB e.g. Geo query or cross-domain entity context relationship queries.
  • +
  • Context Registry Manager: This component is responsible for providing interfaces for CRUD operations of csource registration/query/subscription.
  • +
  • Health Check & Monitoring: This component is responsible for monitoring the health of running services & infrastructure.
  • +
  • Message Bus Handler: Every module of the Scorpio Broker may need to communicate with the bus for the inter-module exchange of messages. This interface is provided by the message bus handler.
  • +
  • Storage Connectors: The Scorpio Broker needs to store certain information in different DB formats. So storage connectors (using any type of message broker methodology) provide the way to connect to those storage systems (which may be present locally or remotely). For example, the entity information could be stored in/streamed to different types of storage systems, e.g. MySQL, PostgreSQL, Bigdata, etc. These connectors could also be implemented for storage resiliency purposes.
  • +
  • Context Registry Connector: Scorpio Broker needs to communicate to the context registry in order to know about the registered context sources (brokers/providers) and the type of data model they support. The context registry connector allows the message broker mechanism to connect to the context registry that may be running locally or remotely in federated mode.
  • +
  • Storage: This is the actual storage (e.g. Postgres/Postgis) where data is persisted.
  • +
  • Context Registry: This is the component which is responsible for saving the registration of the context sources/producers.
  • +
+
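As referenced in the Service Discovery & Registration item above, the following minimal sketch (illustrative only, with an assumed application class name) shows how a Spring Boot micro-service typically registers itself with the discovery service so that the API gateway can route requests to it by name:

    import org.springframework.boot.SpringApplication;
    import org.springframework.boot.autoconfigure.SpringBootApplication;
    import org.springframework.cloud.client.discovery.EnableDiscoveryClient;

    @SpringBootApplication
    @EnableDiscoveryClient // registers this service with the service registry (e.g. Eureka) on startup
    public class ExampleManagerApplication {
        public static void main(String[] args) {
            SpringApplication.run(ExampleManagerApplication.class, args);
        }
    }

The name used for registration is usually taken from the spring.application.name property, and deregistration happens automatically on shutdown.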
+
+

2. Deployment Architecture¶

+

This section covers the deployment architecture of the Scorpio Broker, which uses a diverse technology stack.

+
+_images/deploymentarchitecture.png +
+

The deployment architecture leverages the Spring Cloud framework, which addresses many micro-service concerns (e.g. scaling, monitoring, fault tolerance, high availability, security, decoupling, etc.), and a Kafka-based distributed and scalable message queue infrastructure to provide high-performance message processing for the huge number of context requests that is usual in the IoT domain. The deployment architecture covers the high-level operational (HTTP-based REST with the methods POST/GET/DELETE/PATCH) request flow from the external world to the Scorpio Broker system. The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide. In reality, each of the Scorpio Broker services is implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment. The API gateway routes all incoming requests to the specific micro-services with the help of the registration & discovery service. Once the request reaches a micro-service, it uses (pub/sub) Kafka topics (message queues), based on the operation requirement, for real-time storage and for providing intercommunication among the different micro-services over message queues.
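To complement the producer-side sketch shown in the Architecture section, the following minimal sketch (an illustrative assumption, not Scorpio Broker source code) shows the consumer side of such a message queue: a micro-service polling a Kafka topic and handling each published record. The topic name and consumer group id are placeholders:

    import java.time.Duration;
    import java.util.Collections;
    import java.util.Properties;

    import org.apache.kafka.clients.consumer.ConsumerRecord;
    import org.apache.kafka.clients.consumer.ConsumerRecords;
    import org.apache.kafka.clients.consumer.KafkaConsumer;

    public class EntityConsumeSketch {
        public static void main(String[] args) {
            Properties props = new Properties();
            props.put("bootstrap.servers", "kafka:9092");
            props.put("group.id", "storage-manager-sketch"); // hypothetical consumer group
            props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
            props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

            try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
                consumer.subscribe(Collections.singletonList("ENTITY")); // assumed topic name
                while (true) {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                    for (ConsumerRecord<String, String> record : records) {
                        // A real service would persist or further process the entity payload here.
                        System.out.println(record.key() + " -> " + record.value());
                    }
                }
            }
        }
    }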

+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_build/html/troubleshooting.html b/scorpio-broker/docs/en/source/_build/html/troubleshooting.html new file mode 100644 index 0000000000000000000000000000000000000000..83fc94e134f8444cbb1f66ef7bb897fa644b5018 --- /dev/null +++ b/scorpio-broker/docs/en/source/_build/html/troubleshooting.html @@ -0,0 +1,128 @@ + + + + + + + + 12. Troubleshooting — ScorpioBroker documentation + + + + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

12. Troubleshooting¶

+
+

12.1. Missing JAXB dependencies¶

+

When starting the eureka-server you may face the java.lang.TypeNotPresentException: Type javax.xml.bind.JAXBContext not present exception. In that case it is very likely that you are running Java 11 on your machine. Starting from Java 9, the package javax.xml.bind has been marked as deprecated, and it was finally removed completely in Java 11.

+

In order to fix this issue and get the eureka-server running, you need to manually add the JAXB Maven dependencies below to ScorpioBroker/SpringCloudModules/eureka/pom.xml before starting:

+

+
+
+

    …
    <dependencies>
            …
            <dependency>
                    <groupId>com.sun.xml.bind</groupId>
                    <artifactId>jaxb-core</artifactId>
                    <version>2.3.0.1</version>
            </dependency>
            <dependency>
                    <groupId>javax.xml.bind</groupId>
                    <artifactId>jaxb-api</artifactId>
                    <version>2.3.1</version>
            </dependency>
            <dependency>
                    <groupId>com.sun.xml.bind</groupId>
                    <artifactId>jaxb-impl</artifactId>
                    <version>2.3.1</version>
            </dependency>
            …
    </dependencies>
    …

+
+
+ + +
+
+
+ +
+
+ + + + + + + \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs.css b/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs.css new file mode 100644 index 0000000000000000000000000000000000000000..12997138e6287bedcaadcf6dffc3aac62402b199 --- /dev/null +++ b/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs.css @@ -0,0 +1,186 @@ + +/* highlight styles */ +@import url('https://fonts.googleapis.com/css?family=Montserrat%3A100%2C100italic%2C200%2C200italic%2C300%2C300italic%2Cregular%2Citalic%2C500%2C500italic%2C600%2C600italic%2C700%2C700italic%2C800%2C800italic%2C900%2C900italic&ver=4.9.6#038;subset=latin,latin-ext'); +@import url("monokai_sublime_.css"); +@import url("pygments-monokai.css"); + + +/* Estilos ogilvy v2 */ +.wy-nav-content {max-width: none !important} + + +body{font-family: Montserrat,Helvetica,Arial,sans-serif;font-weight: 500; line-height: 1.42857143 !important; color: #4c4c4c } +h1, h2, h3, h4 { + font-family: Montserrat,Helvetica,Arial,sans-serif !important; + color:#000000 !important; + font-weight: 800; + text-transform: uppercase; +} +h1, h2{ + padding-top : 20px; + padding-bottom : 3px; + box-shadow: 0px 7px 0px 0px #000000; +} +h3 { + box-shadow: 0px 4px 0px 0px #000000; +} +h4 { + margin-bottom : 5px; + color:#233c68 !important; + text-transform: none; +} + +code{padding:0px; border-width: 0px; background-color: transparent;font-size: 16px; color:#ff7059 !important} +pre code{color:#ffffff !important} +td { + border-color:#4c4c4c !important; +} +th { + background-color:#5dc0cf; + color: #fff; + border-color:#4c4c4c !important; +} +hr { + width: 200%; + margin-left: -50%; + padding-bottom : 6px; + box-shadow: 0px 3px 0px 0px #5dc0cf; + border-top : none; +} +div[role="navigation"] hr { + box-shadow: none; + border-top: 1px solid #e1e4e5; +} + + +a{font-weight: 600; color:#233c68 !important} +a:hover{color:#5dc0cf !important} +a.icon-home{color:#fff !important; +white-space : pre-line !important; +font-family:'Montserrat',Helvetica,Arial,Lucida,sans-serif !important; font-weight:normal !important;color:#000 !important; text-transform:uppercase !important; margin:0 0 0 0 !important} +.fa-home:before, .icon-home:before{content:None !important} +.wy-menu-vertical {color: #4c4c4c; background-color:#bec3cc} +.wy-menu-vertical .current {color: #4c4c4c; background-color:#ffffff; +border-right: #efefef solid 1px; } + +.wy-menu-vertical .current > a {background:#000000 !important; color : #ffffff !important} +.wy-menu-vertical .current > a {background:#000000 !important; color : #ffffff !important} +.wy-menu-vertical li.current a {border-right: none} +.wy-menu-vertical li.current a:hover{background-color: #000000; color:#ffffff !important} +.wy-menu-vertical li.current > a:hover{background-color:#000000 !important;border-right:0 !important} +.wy-menu-vertical a:hover{background-color: #000000; color:#ffffff !important} +.wy-side-nav-search {background: #fff !important; border-right: #efefef solid 1px;} +.wy-side-nav-search a{background-color:transparent !important; } +.wy-menu-vertical span{font-weight: 600 !important; color:#000000 !important;} +nav > div.wy-menu.wy-menu-vertical > ul > li > a{font-weight:600 !important; color:#000000 !important} +nav > div.wy-menu.wy-menu-vertical > ul > li.current > a{font-weight:600 !important; color:#ffffff !important} + +.wy-side-nav-search{margin-bottom:0 !important; text-align:left !important} +nav > div.wy-menu.wy-menu-vertical > ul > li > a:hover{color:#ffffff 
!important} +.btn-neutral{background-color:#233c68 !important color:#4c4c4c !important} +.btn-neutral:hover{background-color:#233c68 !important; color:#ffffff !important} +.btn-neutral:visited{color:#233c68 !important} +.btn-neutral:visited:hover{color:#ffffff !important} +.rst-versions{background:#000000 none !important;border-top:0 !important;font-family:Verdana !important} +.rst-versions .rst-current-version{background-color:#000000 !important} +.rst-versions a{color:#fff !important; text-decoration:underline !important} +.rst-versions a:hover{color:#FFF !important} +.fa-book,.fa{font-family:verdana !important} +.rst-versions .rst-other-versions{color:#fff !important} +.rst-versions .rst-other-versions hr{border-color:#fff !important} +.fa-home:before, .icon-home:after{content:} +a.icon-home {color:#000000 !important; + font-family: 'Montserrat',Helvetica,Arial,Lucida,sans-serif !important; + font-size: x-large !important; font-weight: 800 !important; + text-transform:uppercase !important; +} + + +.wy-side-nav-search{ + clear:both !important; + padding:40px 10px 16px 10px; + background-image: url('https://www.fiware.org/wp-content/uploads/2018/04/logo.png') !important; + background-size: 128px 30px !important; + background-repeat: no-repeat !important; + background-position: 15px 8px !important; + border-bottom-color : #000000; + border-bottom-width: 1px; + border-style: solid; +} + + +.rst-versions .rst-other-versions dt{font-weight:bold !important} +.wy-body-for-nav{background-image:none !important} +.wy-nav-side{background-color:transparent !important} +/*Fin estilos ogilvy*/ + + + +/* Custom width read the docs */ +.wy-nav-content {max-width: none !important; background-color: #ffffff} +/* to avoid display version when generated from rst */ +.wy-side-nav-search .version {display:none;} +/* Allow multiline project names in side navigation bar */ +.wy-side-nav-search a.icon-home {white-space: pre-wrap;} +.wy-side-nav-search input[type=text]{ + margin-top : 4px; + border-radius: unset; + background-color: #f5f5f5; + border-color:#000000; +} + +.wy-breadcrumbs { + text-transform: uppercase !important; + color : #000000; + font-weight: 600; + font-size: medium; +} + +.wy-nav-top { background-color:#000000;} +.wy-nav-top > a:hover {color: #ffffff !important} +.wy-nav-top > a{ text-transform:uppercase; color: #ffffff !important; font-size: x-large} + +.rst-content img,blockquote {max-width: 94%; background-color: #f5f5f5; border-style: solid; border-color : #4c4c4c; border-width : 1px; margin-left: 3%;} +.rst-content a img {background-color: transparent; border-style: none; margin-left: 0%; margin-right: 0%} +.rst-content blockquote {padding: 15px;} +.rst-content pre {border-style: solid; border-color : #000000; border-width : 1px; margin-left: 3%; margin-right: 3%;} +.rst-content pre code { background-color: #000000; margin: 1px} + +.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td { + background-color: #f5f5f5; +} +.rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td img { + background-color: transparent; border-style: none; +} + +td { + border-color:#4c4c4c !important; +} +th { + background-color:#000000; + color: #fff; + border-color:#4c4c4c !important; +} + +th code { + color: #fff !important; +} + +hr { + width: 108%; + margin-left: -4%; + padding-bottom : 6px; + box-shadow: 0px 3px 0px 0px #23d3d3; +} + +div[role="main"]{ + margin-left:3%; + margin-right:3%; +} + + + + +h1:first-of-type+ul, +h2:first-of-type+ul{display: none} + diff --git 
a/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs_core.css b/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs_core.css new file mode 100644 index 0000000000000000000000000000000000000000..ab0689101fef59e0e2ecc798e3abb960d85577b5 --- /dev/null +++ b/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs_core.css @@ -0,0 +1,87 @@ +body { + font-family: Montserrat,Helvetica,Arial,sans-serif; + font-weight: 500; + line-height: 1.42857143 !important; + color: #4c4c4c +} + +.wy-side-nav-search{ + clear:both !important; + padding:40px 10px 16px 10px; + background-image: url('https://www.fiware.org/wp-content/uploads/2018/04/logo.png') !important; + background-size: 128px 30px !important; + background-repeat: no-repeat !important; + background-position: 15px 8px !important; + border-bottom-color : #000000; + border-bottom-width: 1px; + border-style: solid; +} + +.wy-side-nav-search:before { + content: ""; + display: none +} +.wy-side-nav-search:after { + content: ""; + display: none; +} +.wy-nav-side {background-color:transparent !important} +.wy-nav-content {background-color: #fff} + +.wy-menu-vertical .current > a {background:#233c68 !important} +.wy-menu-vertical .current > a {background:#233c68 !important} +.wy-menu-vertical li.current a:hover {background-color: #233c68} +.wy-menu-vertical li.current > a:hover {background-color:#233c68 !important} +.wy-menu-vertical a:hover {background-color: #233c68} +.wy-menu-vertical span {color:#233c68 !important} +nav > div.wy-menu.wy-menu-vertical > ul > li > a {color:#233c68 !important} +.rst-versions {background:#233c68 none !important} +.rst-versions .rst-current-version {background-color:#233c68 !important} +.wy-side-nav-search {border-bottom-color : #233c68} +.wy-side-nav-search input[type=text] {border-color:#233c68} +.wy-nav-top {background-color: #233c68} +th {background-color:#233c68} + +h1, h2, h3, h4 { + font-family: Montserrat,Helvetica,Arial,sans-serif !important; + color:#000000 !important; + font-weight: 800; + text-transform: uppercase; +} +h1, h2{ + padding-top : 20px; + padding-bottom : 3px; + box-shadow: 0px 7px 0px 0px #233c68; +} +h3 { + box-shadow: 0px 4px 0px 0px #233c68; +} + +a.icon-home{ + color:#000 !important; + white-space : pre-line !important; + font-family:'Montserrat',Helvetica,Arial,Lucida,sans-serif !important; + font-size: x-large !important; font-weight: 800 !important; + text-transform:uppercase !important; + margin:0 0 0 0 !important +} + +.wy-side-nav-search { + background: #fff !important; border-right: #efefef solid 1px; + + clear:both !important; + padding:40px 10px 16px 10px; + background-image: url('https://www.fiware.org/wp-content/uploads/2018/04/logo.png') !important; + background-size: 128px 30px !important; + background-repeat: no-repeat !important; + background-position: 15px 8px !important; + border-bottom-color : #000000; + border-bottom-width: 1px; + border-style: solid; +} + +.wy-side-nav-search a{background-color:transparent !important; } +.wy-body-for-nav{background-image:none !important} +.wy-nav-side{background-color:transparent !important} + + diff --git a/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs_processing.css b/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs_processing.css new file mode 100644 index 0000000000000000000000000000000000000000..28755c66676ee57ff909e144192ba8b42d777647 --- /dev/null +++ b/scorpio-broker/docs/en/source/_static/css/fiware_readthedocs_processing.css @@ -0,0 +1,19 @@ + + + + +.wy-menu-vertical .current > a 
{background:#88a1ce !important} +.wy-menu-vertical .current > a {background:#88a1ce !important} +.wy-menu-vertical li.current a:hover {background-color: #88a1ce} +.wy-menu-vertical li.current > a:hover {background-color:#88a1ce !important} +.wy-menu-vertical a:hover {background-color: #88a1ce} +.wy-menu-vertical span {color:#88a1ce !important} +nav > div.wy-menu.wy-menu-vertical > ul > li > a {color:#88a1ce !important} +.rst-versions {background:#88a1ce none !important} +.rst-versions .rst-current-version {background-color:#88a1ce !important} +.wy-side-nav-search {border-bottom-color : #88a1ce} +.wy-side-nav-search input[type=text] {border-color:#88a1ce} +.wy-nav-top {background-color: #88a1ce} +.rst-content pre {border-color: #88a1ce} +th {background-color:#88a1ce} + diff --git a/scorpio-broker/docs/en/source/buildScorpio.rst b/scorpio-broker/docs/en/source/buildScorpio.rst new file mode 100644 index 0000000000000000000000000000000000000000..322cd108c854fed16e429e78971b022b9a431c48 --- /dev/null +++ b/scorpio-broker/docs/en/source/buildScorpio.rst @@ -0,0 +1,517 @@ +*********************************** +Starting Scorpio via docker-compose +*********************************** + +Start commands to copy +###################### + +Looking for the easiest way to start Scorpio? This is it. +:: + + curl https://raw.githubusercontent.com/ScorpioBroker/ScorpioBroker/development/docker-compose-aaio.yml + sudo docker-compose -f docker-compose-aaio.yml up + + +Introduction +############ +The easiest way to start Scorpio is to use docker-compose. We provide 2 main docker-compose files which rely on dockerhub. +docker-compose-aaio.yml and docker-compose-dist.yml. You can use this files directly as they are to start Scorpio +When you want to run Scorpio in the distributed variant exchange the yml file in the command above. + +docker-compose-aaio.yml +####################### + +AAIO here stands for almost all in one. In this variant the core components of Scorpio and the Spring Cloud components are started within one container. Additional containers are only Kafka and Postgres. For testing and small to medium size deployments this is most likely what you want to use. + +docker-compose-dist.yml +####################### + +In this variant each Scorpio component is started in a different container. This makes it highly flexible and allows you to replace individual components or to start new instances of some core components. + +Configure docker image via environment variables +################################################ + +There are multiple ways to enter environment variables into docker. We will not got through all of them but only through the docker-compose files. However the scorpio relevant parts apply to all these variants. +Configuration of Scorpio is done via the Spring Cloud configuration system. For a complete overview of the used parameters and the default values have a look at the application.yml for the AllInOneRunner here, https://github.com/ScorpioBroker/ScorpioBroker/blob/development/AllInOneRunner/src/main/resources/application-aaio.yml. +To provide a new setting you can provide those via an environment entry in the docker-compose file. The variable we want to set is called spring_args. 
+Since we only want to set this option for the Scorpio container we make it a sub part of the Scorpio Container entry like this +:: + + scorpio: + image: scorpiobroker/scorpio:scorpio-aaio_1.0.0 + ports: + - "9090:9090" + depends_on: + - kafka + - postgres + environment: + spring_args: --maxLimit=1000 + +With this we would set the maximum limit for a query reply to 1000 instead of the default 500. + +Be quit! docker +############### + +Some docker containers can be quite noisy and you don't want all of that output. The easy solution is to add this +:: + + logging: + driver: none + +in the docker-compose file to respective container config. E.g. to make Kafka quite. +:: + + kafka: + image: wurstmeister/kafka + hostname: kafka + ports: + - "9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_PORT: 9092 + KAFKA_LOG_RETENTION_MS: 10000 + KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 5000 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + depends_on: + - zookeeper + logging: + driver: none + +************************ +Configuration Parameters +************************ + +Scorpio uses the Spring Cloud/Boot configuration system. This is done via the application.yml files in the corresponding folders. +The AllInOneRunner has a complete set of all available configuration options in them. + +Those can be overwriten via the command line or in the docker case as described above. + ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| Config Option | Description | Default Value | ++===================+=================================================+================================================================================+ +| atcontext.url | the url to be used for the internal context | http://localhost:9090/ngsi-ld/contextes/ | +| | server | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| bootstrap.servers | the host and port of the internal kafka | kafka:9092 (default used for docker) | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| broker.id a | unique id for the broker. needed for federation | Broker1 | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| broker.parent. | url for the parent broker in a federation setup | SELF (meaning no federation) | +| location.url | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| broker. | GeoJSON description of the coverage. used for | empty | +| geoCoverage | registration in a federation setup. 
| | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| defaultLimit | The default limit for a query if no limit is | 50 | +| | provided | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| maxLimit | The maximum number of results in a query | 500 | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| reader.datasource | If you change the postgres setup here you set | ngb | +| .hikari.password | the password | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| reader.datasource | JDBC URL to postgres | jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader | +| .hikari.url | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| reader.datasource | username for the postgres db | ngb | +| .hikari.username | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| writer.datasource | If you change the postgres setup here you set | ngb | +| .hikari.password | the password | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| writer.datasource | JDBC URL to postgres | jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer | +| .hikari.url | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| writer.datasource | username for the postgres db | ngb | +| .hikari.username | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ + + + +**************************** +Building Scorpio from source +**************************** + +Scorpio is developed in Java using SpringCloud as microservice framework +and Apache Maven as build tool. Some of the tests require a running +Apache Kafka messagebus (further instruction are in the Setup chapter). +If you want to skip those tests you can run +``mvn clean package -DskipTests`` to just build the individual +microservices. + +General Remarks on Building +########################### + +Further down this document you will get exact build commands/arguments +for the different flavors. This part will give you an overview on how +the different arguments work. + +Maven Profiles +-------------- +There currently three available Maven build profiles + +Default +~~~~~~~ +If you provide no -P argument Maven will produce individual jar files for the microservices and the AllInOneRunner with each "full" microservice packaged (this will result in ca. 500 MB size for the AllInOneRunner) + +docker +~~~~~~ +This will trigger the Maven to build docker containers for each +microservice. 
+ +docker-aaio +~~~~~~~~~~~ +This will trigger the Maven to build one docker container, containing +the AllInOneRunner and the spring cloud components (eureka, configserver +and gateway) + +Maven arguments +~~~~~~~~~~~~~~~ +These arguments are provided via -D in the command line. + +skipTests +~~~~~~~~~ +Generally recommended if you want to speed +up the build or you don't have a kafka instance running, which is +required by some of the tests. + +skipDefault +~~~~~~~~~~~ +This is a special argument for the Scorpio build. This argument will disable springs +repacking for the individual microservices and will allow for a smaller +AllInOneRunner jar file. This argument shoulnd ONLY be used in +combination with the docker-aaio profile. + +Spring Profiles +--------------- + +Spring supports also profiles which can be activated when launching a +jar file. Currently there 3 profiles actively used in Scorpio. The +default profiles assume the default setup to be a individual +microservices. The exception is the AllInOneRunner which as default +assumes to be running in the docker-aaio setup. + +Currently you should be able to run everything with a default profile +except the gateway in combination with the AllInOneRunner. In order to +use these two together you need to start the gateway with the aaio +spring profile. This can be done by attaching this to your start command +-Dspring.profiles.active=aaio. + +Additonally some components have a dev profile available which is purely +meant for development purposes and should only be used for such. + +Setup +##### + +Scorpio requires two components to be installed. + +Postgres +-------- + +Please download the `Postgres DB `__ and +the `Postgis `__ extension and follow the +instructions on the websites to set them up. + +Scorpio has been tested and developed with Postgres 10. + +The default username and password which Scorpio uses is "ngb". If you +want to use a different username or password you need to provide them as +parameter when starting the StorageManager and the RegistryManager. + +e.g. + +.. code:: console + + java -jar Storage/StorageManager/target/StorageManager--SNAPSHOT.jar --reader.datasource.username=funkyusername --reader.datasource.password=funkypassword + +OR + +.. code:: console + + java -jar Registry/RegistryManager/target/RegistryManager--SNAPSHOT.jar --spring.datasource.username=funkyusername --spring.datasource.password=funkypassword + +Don't forget to create the corresponding user ("ngb" or the different +username you chose) in postgres. It will be used by the SpringCloud +services for database connection. While in terminal, log in to the psql +console as postgres user: + +.. code:: console + + sudo -u postgres psql + +Then create a database "ngb": + +.. code:: console + + postgres=# create database ngb; + +Create a user "ngb" and make him a superuser: + +.. code:: console + + postgres=# create user ngb with encrypted password 'ngb'; + postgres=# alter user ngb with superuser; + +Grant privileges on database: + +.. code:: console + + postgres=# grant all privileges on database ngb to ngb; + +Also create an own database/schema for the Postgis extension: + +.. 
code:: console + + postgres=# CREATE DATABASE gisdb; + postgres=# \connect gisdb; + postgres=# CREATE SCHEMA postgis; + postgres=# ALTER DATABASE gisdb SET search_path=public, postgis, contrib; + postgres=# \connect gisdb; + postgres=# CREATE EXTENSION postgis SCHEMA postgis; + +Apache Kafka +------------ + +Scorpio uses `Apache Kafka `__ for the +communication between the microservices. + +Scorpio has been tested and developed with Kafka version 2.12-2.1.0 + +Please download `Apache Kafka `__ +and follow the instructions on the website. + +In order to start kafka you need to start two components: Start +zookeeper with + +.. code:: console + + /bin/[Windows]/zookeeper-server-start.[bat|sh] /config/zookeeper.properties + +Start kafkaserver with + +.. code:: console + + /bin/[Windows]/kafka-server-start.[bat|sh] /config/server.properties + +For more details please visit the Kafka +`website `__. + +Getting a docker container +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The current maven build supports two types of docker container +generations from the build using maven profiles to trigger it. + +The first profile is called 'docker' and can be called like this + +.. code:: console + + sudo mvn clean package -DskipTests -Pdocker + +this will generate individual docker containers for each micro service. +The corresponding docker-compose file is ``docker-compose-dist.yml`` + +The second profile is called 'docker-aaio' (for almost all in one). This +will generate one single docker container for all components the broker +except the kafka message bus and the postgres database. + +To get the aaio version run the maven build like this + +.. code:: console + + sudo mvn clean package -DskipTests -DskipDefault -Pdocker-aaio + +The corresponding docker-compose file is ``docker-compose-aaio.yml`` + +Starting the docker container +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To start the docker container please use the corresponding +docker-compose files. I.e. + +.. code:: console + + sudo docker-composer -f docker-compose-aaio.yml up + +to stop the container properly execute + +.. code:: console + + sudo docker-composer -f docker-compose-aaio.yml down + +General remark for the Kafka docker image and docker-compose +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The Kafka docker container requires you to provide the environment +variable ``KAFKA_ADVERTISED_HOST_NAME``. This has to be changed in the +docker-compose files to match your docker host IP. You can use +``127.0.0.1`` however this will disallow you to run Kafka in a cluster +mode. + +For further details please refer to +https://hub.docker.com/r/wurstmeister/kafka + +Running docker build outside of Maven +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +If you want to have the build of the jars separated from the docker +build you need to provide certain VARS to docker. 
The following list +shows all the vars and their intended value if you run docker build from +the root dir + +- ``BUILD_DIR_ACS = Core/AtContextServer`` + +- ``BUILD_DIR_SCS = SpringCloudModules/config-server`` + +- ``BUILD_DIR_SES = SpringCloudModules/eureka`` + +- ``BUILD_DIR_SGW = SpringCloudModules/gateway`` + +- ``BUILD_DIR_HMG = History/HistoryManager`` + +- ``BUILD_DIR_QMG = Core/QueryManager`` + +- ``BUILD_DIR_RMG = Registry/RegistryManager`` + +- ``BUILD_DIR_EMG = Core/EntityManager`` + +- ``BUILD_DIR_STRMG = Storage/StorageManager`` + +- ``BUILD_DIR_SUBMG = Core/SubscriptionManager`` + +- ``JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SCS = config-server-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SGW = gateway-${project.version}.jar`` + +- ``JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar`` + +- ``JAR_FILE_RUN_ACS = AtContextServer.jar`` + +- ``JAR_FILE_RUN_SCS = config-server.jar`` + +- ``JAR_FILE_RUN_SES = eureka-server.jar`` + +- ``JAR_FILE_RUN_SGW = gateway.jar`` + +- ``JAR_FILE_RUN_HMG = HistoryManager.jar`` + +- ``JAR_FILE_RUN_QMG = QueryManager.jar`` + +- ``JAR_FILE_RUN_RMG = RegistryManager.jar`` + +- ``JAR_FILE_RUN_EMG = EntityManager.jar`` + +- ``JAR_FILE_RUN_STRMG = StorageManager.jar`` + +- ``JAR_FILE_RUN_SUBMG = SubscriptionManager.jar`` + +Starting of the components +########################## + +After the build start the individual components as normal Jar files. + +Start the SpringCloud services by running + +.. code:: console + + java -jar SpringCloudModules/eureka/target/eureka-server--SNAPSHOT.jar + java -jar SpringCloudModules/gateway/target/gateway--SNAPSHOT.jar + java -jar SpringCloudModules/config-server/target/config-server--SNAPSHOT.jar + +Start the broker components + +.. code:: console + + java -jar Storage/StorageManager/target/StorageManager--SNAPSHOT.jar + java -jar Core/QueryManager/target/QueryManager--SNAPSHOT.jar + java -jar Registry/RegistryManager/target/RegistryManager--SNAPSHOT.jar + java -jar Core/EntityManager/target/EntityManager--SNAPSHOT.jar + java -jar History/HistoryManager/target/HistoryManager--SNAPSHOT.jar + java -jar Core/SubscriptionManager/target/SubscriptionManager--SNAPSHOT.jar + java -jar Core/AtContextServer/target/AtContextServer--SNAPSHOT.jar + +Changing config +--------------- + +All configurable options are present in application.properties files. In +order to change those you have two options. Either change the properties +before the build or you can override configs by add +``--=-SNAPSHOT.jar --reader.datasource.username=funkyusername --reader.datasource.password=funkypassword` + +Enable CORS support +------------------- + +You can enable cors support in the gateway by providing these +configuration options - gateway.enablecors - default is False. Set to +true for general enabling - gateway.enablecors.allowall - default is +False. Set to true to enable CORS from all origins, allow all headers +and all methods. Not secure but still very often used. 
- +gateway.enablecors.allowedorigin - A comma separated list of allowed +origins - gateway.enablecors.allowedheader - A comma separated list of +allowed headers - gateway.enablecors.allowedmethods - A comma separated +list of allowed methods - gateway.enablecors.allowallmethods - default +is False. Set to true to allow all methods. If set to true it will +override the allowmethods entry + +Troubleshooting +############### + +Missing JAXB dependencies +------------------------- + +When starting the eureka-server you may facing the + +**java.lang.TypeNotPresentException: Type javax.xml.bind.JAXBContext not +present** exception. It's very likely that you are running Java 11 on +your machine then. Starting from Java 9 package ``javax.xml.bind`` has +been marked deprecated and was finally completely removed in Java 11. + +In order to fix this issue and get eureka-server running you need to +manually add below JAXB Maven dependencies to +``ScorpioBroker/SpringCloudModules/eureka/pom.xml`` before starting: + +.. code:: xml + + ... + + ... + + com.sun.xml.bind + jaxb-core + 2.3.0.1 + + + javax.xml.bind + jaxb-api + 2.3.1 + + + com.sun.xml.bind + jaxb-impl + 2.3.1 + + ... + + ... + +This should be fixed now using conditional dependencies. diff --git a/scorpio-broker/docs/en/source/callFlow.rst b/scorpio-broker/docs/en/source/callFlow.rst new file mode 100644 index 0000000000000000000000000000000000000000..e8fe079af2bb00a8e34a331228f027a5faf2e97c --- /dev/null +++ b/scorpio-broker/docs/en/source/callFlow.rst @@ -0,0 +1,228 @@ +*************** +Operation flows +*************** + +Entity Create/Update/Append +########################### + +.. figure:: figures/flow-1.png + +The Figure is showing the operational flow of entity create/update/append in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to create/update/append an entity in the form of the HTTP POST request. + +2. The request enters in service API gateway. + + 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service. + + 2.2. The service API gateway forwards the HTTP request to the Entity Manager micro-service. +    +3. The entity Manager internally calls an LDContext resolver service to resolve the payload with the given context sent along with the POST request. Once the payload is resolved with context, it now fetches the previously stored data/entities from the Topic “Entities†and validates the requested entity against the existing stored entities based on EntityID. + +- If the entity is already present (or with all the attributes and values that are requested to be modified), an error message (“already existsâ€) will be responded for the same and no further step will be executed. + +- Else it will move for further processing. + +4. The Entity Manager (EM) will do publish/store and send the response to the requester for the requested Entity(E1) creation operation given as follows: + + 4.1.EM publishes the E1 in the Kafka under Topic “Entitiesâ€. + + 4.2.EM publishes the E1 in the Kafka under Topic “Entity_Create/Update/Append†as well. + + 4.3. Upon successful pub operation, EM will send the response back. + +**Note**: “Entities†topic will save all changes of an entity done over a period of time by any of the create/update/append operations of an entity. 
However, “Entity_Create/Update/Append†Topic (specific to CREATE operation) will only store the data changes of entity create operation only. Having different topics per operation will avoid ambiguity situations among different consumers different requirements. E.g. the subscription manager may need to subscribe for the whole entity, a set of specific attributes, or might be some value change of certain attributes. So, managing all these requirements would be hard if a separate topic per operation is not maintained and would be very simplified to provide direct delta change in data for the given entity at any point in time if separate topics per operation are maintained. Therefore, putting all operations data in a single topic cannot offer the required decoupling, simplification, and flexibility to subscribe/manage at operations, data, or delta data level requirements. +So that’s why creating separate topics per operation and one common topic for recording all changes (require to validate the whole entity changes for all operations over a period of time) of all operations to the given entity is the favorable design choice. +The context for the given payload is being stored by the LDContext resolver service in the Kafka topic under the name AtContext.   + +5. When a message gets published to Kafka Topics, the consumers of that topic will get notified who has subscribed or listening to those topics. In this case, the consumers of “Entity Create/Update/Append†topic upon receiving notification will do the following: + + 5.1. Subscription Manager when getting a notification for the related entity it will check for the notification validation for the current entity and checks if the notification needs to be sent accordingly. + + 5.2. Storage Manager, upon notification from Entities & CR Topics, will trigger the further operations to store/modify the entity related changes in the DB tables. + +6. Now entity manager also prepares for registration of the entity data model in the Context Registry. Following are the further functions it performs to achieve the same: + + 6.1. So it prepares the csource registration payload (as per NGSI_LD spec section C.3) from the entity payload and fills the necessary field (like id, endpoint as broker IP, location, etc.). Afterword entity manager writes this created csource payload in the CR Topic. + + 6.2.CR Manager listens to this CR topic and then able to know that some entity has registered. + + 6.3.CR manager writes the updates, if any are there, into the Csource Topic.   + +Entity Subscription +################### + +.. figure:: figures/flow-2.png + +The Figure is showing the operational flow of entity subscription in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to subscribe for an entity (or attribute) in the form of the HTTP POST request. + +2. The request enters in service API gateway. + + 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service. + + 2.2. The service API gateway forwards the HTTP request to the Subscription Manager micro-service. +    +3. The Subscription Manager internally calls an LDContext resolver service to resolve the payload with the given context sent along with the POST request. 
The subscription manager then fetches the previously stored data/entities from the Topic “Subscription†and validates the requested entity against the existing stored values based on EntityID. + +- If the data for the current request is already present, an error message will be responded for the same and no further step will be executed. + +- Else it will move for further processing. + +4. The Subscription Manager (SM) will publish/store and send the response to the requestor for the requested operation given as follows: + + 4.1.SM publish the subscription S(E1) in the Kafka under Topic “Subscription†+ + 4.2.SM will start the notification functionality and will start/keep listening for related subscription on. + + 4.2.1. Entity related topics “Create/Update/Append†+ + 4.2.2.Context source related topic i.e. “CSource†topic for any future registration of context sources. Doing this it avoids the need to query CR explicitly for csources for already subscribed items/entities. + + 4.2.3.CRQueryResult Topic for gathering results of the raised specific queries, if any are there. + + 4.2.4. Upon successful subscription condition of subscription request, SM will notify the subscribed entity to the given endpoint back. And also do the remote subscriptions to the context sources provided by the context registry. + + 4.3. Upon successful pub operation, SM will send the response back +    +5.SM optionally may raise the query to CR by posting in the CRQuery Topic for each of the subscription requests received (only once per each subscription request). When a message gets published to CRQuery Topic, the consumer CR will be notified who has subscribed or listening on this topic. Now, CR will do the following: + + 5.1. CR will receive the notification and checks for the list of context sources by pulling data from CR Topic and/or CSourceSub Topic for whom this subscription may valid. + + 5.2.CR publishes the list of context sources into the CRQueryResult topic upon which the SM would have already started listening and repeat steps 4.2.3 and 4.2.4. + +**Note**: CSource Topic will contain the list of context sources registered through Csource registration interface directly. CR Topic will contain the map of Entity Data model (maintained as an entity ID) created based on entity creation request (through IoT broker interface) and/or provider/data source of that entity model. +Limitation: In the first release of Scorpio Broker, Csource query is not supported instead csource query is based on the internal messaging queue mechanism. In the future, both the message queue and Rest based csource query would be supported. + + + +Query +##### + +.. figure:: figures/flow-3.png + +The Figure is showing the operational flow of entity subscription in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to query for entities/an entity/attribute in the form of an HTTP GET request. + +2. The request enters in service API gateway. + + 2.1. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service. + + 2.2. The service API gateway forwards the HTTP request to the Query Manager micro-service. +    +3. The query manager now fetches the previously stored data/entities from the Topic “Entitiesâ€. 
+ +- If the query is for all entities or specific entities with id and/or attribute is requested, this will be directly served based on Kafka Entity topic data by query manager without involving the storage manager. In short simpler queries like non-geo queries or without regular expression queries associated with entity or entities can be served directly. In this case, the response will be sent back and processing jumps to step 7.2. + +- For complex queries, the query manager will take help from the storage manager as mention in the following steps. + +4. The Query Manager (in case of complex queries) will publish the query (embedding a used in the message and other metadata) into the Query topic which is being listened by the Storage manager. + +5. The storage manager gets the notification for the requested query and starts processing the query over the DB data and builds the query response. + +6. The storage manager publishes the response of query in the Query topic which is being listened by Query manager. + +7. The QM receives the notification from the QueryResult topic. + + 7.1.  It sends the HTTP response back to the API gateway. + + 7.2.API gateway sends back the response to the end-user/requestor. + + + + + +Context Source Registration +########################### + +.. figure:: figures/flow-4.png + +The Figure is showing the operational flow of context source registration in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to csource registration for in the form of an HTTP POST request. + +2. The request enters in service API gateway. + + a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from discovery & registry service. + + b.The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.   + +3. The CR manager now fetches the previously stored data/entities from the Topic “CSourceâ€. + + a. If the entry for the request csource is already present it exits the processing and informing the same to the requester. If it is not present, then it continues for further processing. + + b.Now the CR manager performs some basic validation to check if this is a valid request with the valid payload. + + c.CR manager now writes this payload into the Csoure Topic. + +4. The Storage Manager will keep listening for the Csource topic and for any new entry write it perform the relative operation in the database. + +5. The CR manager prepares the response for csource request and + + 5.1 sends the Http response back to the API gateway. + + 5.2 API gateway sends back the response to the end-user/requester. + +**Note**: For Conext Source Update request only the payload will get changes and in step 3 upon validation for the existing entity it will not exit rather it will update the retrieved entity and write it back into the Kafka. The rest of the flow will remain mostly the same. + + + + +Context Source Subscription +########################### + +.. figure:: figures/flow-5.png + +The Figure Scorpio Broker Context Source Subscription Flow is showing the operational flow of context source subscriptions in the Scorpio Broker system. Following are the marked steps interpretation: + +1. An application calls the NGSI-LD compliant interface (exposed by service API gateway) to csource updates in the form of an HTTP POST request. + +2. 
Context Source Registration
###########################

.. figure:: figures/flow-4.png

The figure shows the operational flow of context source registration in the Scorpio Broker system. The marked steps are interpreted as follows:

1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) for csource registration in the form of an HTTP POST request.

2. The request enters the service API gateway.

   a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from the discovery & registry service.

   b. The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.

3. The CR manager now fetches the previously stored data/entities from the Topic "CSource".

   a. If an entry for the requested csource is already present, it exits the processing and informs the requester. If it is not present, it continues with further processing.

   b. The CR manager then performs some basic validation to check whether this is a valid request with a valid payload.

   c. The CR manager then writes this payload into the CSource Topic.

4. The Storage Manager keeps listening to the CSource topic and, for any new entry written, performs the corresponding operation in the database.

5. The CR manager prepares the response for the csource request and

   5.1 sends the HTTP response back to the API gateway.

   5.2 The API gateway sends the response back to the end-user/requester.

**Note**: For a Context Source Update request only the payload changes; in step 3, upon finding the existing entry during validation, it does not exit but instead updates the retrieved entry and writes it back into Kafka. The rest of the flow remains mostly the same.




Context Source Subscription
###########################

.. figure:: figures/flow-5.png

The figure "Scorpio Broker Context Source Subscription Flow" shows the operational flow of context source subscriptions in the Scorpio Broker system. The marked steps are interpreted as follows:

1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) for a csource subscription in the form of an HTTP POST request.

2. The request enters the service API gateway.

   a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from the discovery & registry service.

   b. The service API gateway forwards the HTTP request to the Context Registry (CR) Manager micro-service.

3. The CR manager now fetches the previously stored data/entities from the Topic "CSourceSub".

   a. The CR manager performs some basic validation to check whether this is a valid request with a valid payload.

   b. If an entry for the requested csource subscription is already present, it exits the processing and informs the requester. If it is not present, it continues with further processing.

   c. The CR manager then writes this payload into the CSourceSub Topic.

   d. In parallel, it also starts an independent thread to listen to the CSource Topic for the requested subscription; when the subscription condition is met, a notification is sent to the registered endpoint provided in the subscription payload.

4. The Storage Manager keeps listening to the CSourceSub topic and, for any new/updated entry written, performs the corresponding operation in the database.

5. The CR manager prepares the response for the csource subscription request and

   5.1 sends the HTTP response back to the API gateway.

   5.2 The API gateway sends the response back to the end-user/requester.




History
#######

.. figure:: figures/flow-6.png

The figure shows the operational flow of history (temporal entity) handling in the Scorpio Broker system. The marked steps are interpreted as follows:

1. An application calls the NGSI-LD compliant interface (exposed by the service API gateway) to the history manager in the form of an HTTP POST request.

2. The request enters the service API gateway.

   a. The service API gateway discovers the actual serving micro-service endpoints (where the incoming requests need to be forwarded) from the discovery & registry service.

   b. The service API gateway forwards the HTTP request to the History Manager micro-service.

3. The history manager now executes the EVA algorithm approach on the received payload and pushes the payload attributes to the Kafka topic "TEMPORALENTITY".

**Note**: The History Manager must walk through each attribute at the root level of the object (except @id and @type). Inside each attribute, it must walk through each instance (array element). Then, it sends the current object to the Kafka topic TEMPORALENTITY.

4. The history manager keeps listening to the "TEMPORALENTITY" topic and, for any new entry, performs the corresponding operation in the database.
\ No newline at end of file
diff --git a/scorpio-broker/docs/en/source/conf.py b/scorpio-broker/docs/en/source/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..dec4a4453bc9169bbcb38db498310fb037d52873 --- /dev/null +++ b/scorpio-broker/docs/en/source/conf.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here.
+# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'ScorpioBroker' +copyright = '2020, NECTI + NLE' +author = 'NECTI + NLE' + +# The short X.Y version +version = '1.0' +# The full version, including alpha/beta/rc tags +release = '' + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +#html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +#html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} +def setup(app): + app.add_stylesheet('css/fiware_readthedocs.css') + app.add_stylesheet('css/fiware_readthedocs_core.css') + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'ScorpioBrokerdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'ScorpioBroker.tex', 'ScorpioBroker Documentation', + 'NECTI', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'scorpiobroker', 'ScorpioBroker Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'ScorpioBroker', 'ScorpioBroker Documentation', + author, 'ScorpioBroker', 'One line description of project.', + 'Miscellaneous'), +] \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/config.rst b/scorpio-broker/docs/en/source/config.rst new file mode 100644 index 0000000000000000000000000000000000000000..b331592b82516e4ab6ea5367d22ce4e5b2ff4345 --- /dev/null +++ b/scorpio-broker/docs/en/source/config.rst @@ -0,0 +1,165 @@ +***************************** +Config parameters for Scorpio +***************************** + +This section covers all the basic configuration needed for the Scorpio broker. This can be used as the basic template for the various micro-services of the Scorpio. + +Description of various configuration parameters +############################################### + +1. server:- In this, the user can define the various server related parameters like **port** and the maximum **number of threads** for the internal tomcat server. This is related to the microservice communication. Be careful with changes. + +.. code-block:: JSON + + server: + port: XXXX + tomcat: + max: + threads: XX + +2. Entity Topics:- These are the topics which are used for the internal communication of Scorpio on Kafka. If you change this you need to change things in the source code too. + +.. code-block:: JSON + + entity: + topic: XYZ + create: + topic: XYZ + append: + topic: XYZ + update: + topic: XYZ + delete: + topic: XYZ + index: + topic: XYZ + +3. batchoperations:- Used to define the limit for the batch operations defined by NGSI-LD operations. This is http server config and hardware related. Change with caution. + +.. code-block:: JSON + + batchoperations: + maxnumber: + create: XXXX + update: XXXX + upsert: XXXX + delete: XXXX + +4. bootstrap:- Used to define the URL for the Kafka broker. Change only if you have changed the setup of Kafka + +.. code-block:: JSON + + bootstrap: + servers: URL + +5. csources Topics:- These are the topics which are used for the internal communication of Scorpio on Kafka. If you change this you need to change things in the source code too. + +.. code-block:: JSON + + registration: + topic: CONTEXT_REGISTRY + +6. append:- Used to define the entity append overwrite option. Change with only with extreme caution. + +.. code-block:: JSON + + append: + overwrite: noOverwrite + + +7. spring:- Used to define the basic details of the project like service name as well as to provide the configuration details for Kafka, flyway, data source, and cloud. DO NOT CHANGE THOSE UNLESS YOU KNOW WHAT YOU ARE DOING! + +.. 
code-block:: JSON + + spring: + application: + name: serviceName + main: + lazy-initialization: true + kafka: + admin: + properties: + cleanup: + policy: compact + flyway: + baselineOnMigrate: true + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + datasource: + url: "jdbc:postgresql://127.0.0.1:5432/ngb?ApplicationName=ngb_querymanager" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + + +8. query Topics:- These are the topics which are used for the internal communication of Scorpio on Kafka. If you change this you need to change things in the source code too. + +.. code-block:: JSON + + query: + topic: QUERY + result: + topic: QUERY_RESULT + +9. atcontext:- Used to define the URL for served context by scorpio for scenarios where a mixed context is provided via a header. + +.. code-block:: JSON + + atcontext: + url: http://:/ngsi-ld/contextes/ + +10. Key:- Used to define the file for the deserialization. DO NOT CHANGE! + +.. code-block:: JSON + + key: + deserializer: org.apache.kafka.common.serialization.StringDeserializer + +11. reader:- Used to configure the database to the Scorpio broker, required to perform all the read operations. This example is based on the default config for a local installed Postgres DB + +.. code-block:: JSON + + reader: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 + +12. writer:- Used to configure the database to the Scorpio broker, required to perform all the write operations. This example is based on the default config for a local installed Postgres DB. + +.. code-block:: JSON + + writer: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Writer + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/docs/en/source/contributionGuideline.md b/scorpio-broker/docs/en/source/contributionGuideline.md new file mode 100644 index 0000000000000000000000000000000000000000..9791a8b32dedb87adc2bb31139dcad27d3ace2b4 --- /dev/null +++ b/scorpio-broker/docs/en/source/contributionGuideline.md @@ -0,0 +1,56 @@ +Branch Management Guidelines +============================ + +![](figures/gitGuideline.jpg) + +The community can have two main branches with an infinite lifetime: + +1. **Master branch**: This is a highly stable branch that is always + production-ready and contains the last release version of source + code in production. +2. **Development branch**: Derived from the master branch, the + development branch serves as a branch for integrating different + features planned for an upcoming release. This branch may or may not + be as stable as the master branch. It is where developers + collaborate and merge feature branches. All of the changes should be + merged back into the master somehow and then tagged with a release + number. 
+ +Apart from those two primary branches, there are other branches in the +workflow: + +- **Feature Branch**: Forked from the development branch for feature + development i.e. enhancement or documentation. Merged back to the + development branch after feature development or enhancement + implementation. +- **Bug Branch**: Ramify from the development branch. Merged back to + the development branch after bug fixing. +- **Hotfix branch**: Hotfix branches are created from the master + branch. It is the current production release running live and + causing troubles due to a severe bug. But changes in development are + yet unstable. We may then branch off a hotfix branch and start + fixing the problem. It should be the rarest occasion, in case only + critical bugs. + +**Note**: Only NLE and NECTI members have the privilege to create and +merge the Hotfix branch. + +| Branch | Branches naming guideline | Remarks | +|------------------|-------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------| +| Feature branches | Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *feature-feature_id* | *feature_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** | +| Bug Branches | Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *bug-bug_id* | *bug_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** | +| Hotfix Branches | Must branch from: *master branch*. Must merge back into: *master branch*. Branch naming convention: *hotfix-bug number* | *Bug number* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** | + +Permissions to the branches: +---------------------------- + +- **Master** - We tend to very strict that only NLE members and + privileged members of NECTI can merge on the Master branch and + accept the pull requests. Pull requests to master can be raised by + only NECTI OR NLE members. +- **Development** - Any community member can raise the pull request to + the development branch but it should be reviewed by NLE or NECTI + members. Development branches commits will be moved to the master + branch only when all the test cases written under NGSI-LD test + suites, will run successfully. + diff --git a/scorpio-broker/docs/en/source/contributionGuideline.rst b/scorpio-broker/docs/en/source/contributionGuideline.rst new file mode 100644 index 0000000000000000000000000000000000000000..1c3ba15fa3cc0c2a4211742519ea2503658c5f1b --- /dev/null +++ b/scorpio-broker/docs/en/source/contributionGuideline.rst @@ -0,0 +1,65 @@ +*********************** +Contribution guidelines +*********************** + +Branch Management Guidelines +############################ + +.. figure:: figures/gitGuideline.jpg + +The community can have two main branches with an infinite lifetime: + +1. **Master branch**: This is a highly stable branch that is always + production-ready and contains the last release version of source + code in production. +2. **Development branch**: Derived from the master branch, the + development branch serves as a branch for integrating different + features planned for an upcoming release. This branch may or may not + be as stable as the master branch. It is where developers + collaborate and merge feature branches. 
All of the changes should be + merged back into the master somehow and then tagged with a release + number. + +Apart from those two primary branches, there are other branches in the +workflow: + +- **Feature Branch**: Forked from the development branch for feature + development i.e. enhancement or documentation. Merged back to the + development branch after feature development or enhancement + implementation. +- **Bug Branch**: Ramify from the development branch. Merged back to + the development branch after bug fixing. +- **Hotfix branch**: Hotfix branches are created from the master + branch. It is the current production release running live and + causing troubles due to a severe bug. But changes in development are + yet unstable. We may then branch off a hotfix branch and start + fixing the problem. It should be the rarest occasion, in case only + critical bugs. + +**Note**: Only NLE and NECTI members have the privilege to create and +merge the Hotfix branch. + ++------------------+-------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ +| Branch | Branches naming guideline | Remarks | ++==================+=========================================================================================================================+====================================================================================================+ +| Feature branches | Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *feature-feature_id* | *feature_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** | ++------------------+-------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ +| Bug Branches | Must branch from: *development*. Must merge back into: *development*. Branch naming convention: *bug-bug_id* | *bug_id* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** | ++------------------+-------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ +| Hotfix Branches | Must branch from: *master branch*. Must merge back into: *master branch*. Branch naming convention: *hotfix-bug number* | *Bug number* is the Github issue id from **https://github.com/ScorpioBroker/ScorpioBroker/issues** | ++------------------+-------------------------------------------------------------------------------------------------------------------------+----------------------------------------------------------------------------------------------------+ + + +Permissions to the branches +*************************** + +- **Master** - We tend to very strict that only NLE members and + privileged members of NECTI can merge on the Master branch and + accept the pull requests. Pull requests to master can be raised by + only NECTI OR NLE members. +- **Development** - Any community member can raise the pull request to + the development branch but it should be reviewed by NLE or NECTI + members. 
Development branches commits will be moved to the master + branch only when all the test cases written under NGSI-LD test + suites, will run successfully. + diff --git a/scorpio-broker/docs/en/source/docker.rst b/scorpio-broker/docs/en/source/docker.rst new file mode 100644 index 0000000000000000000000000000000000000000..b77e893faa02b98e0c424416bdfaef523d15c9c3 --- /dev/null +++ b/scorpio-broker/docs/en/source/docker.rst @@ -0,0 +1,98 @@ +**************************** +Getting a docker container +**************************** + +The current maven build supports two types of docker container generations from the build using maven profiles to trigger it. + +The first profile is called 'docker' and can be called like this + +:: + + mvn clean package -DskipTests -Pdocker + +this will generate individual docker containers for each microservice. The corresponding docker-compose file is `docker-compose-dist.yml` + + +The second profile is called 'docker-aaio' (for almost all in one). This will generate one single docker container for all components of the broker except the Kafka message bus and the Postgres database. + +To get the aaio version run the maven build like this + +:: + + mvn clean package -DskipTests -Pdocker-aaio + +The corresponding docker-compose file is `docker-compose-aaio.yml` + +General remark for the Kafka docker image and docker-compose +============================================================ + +The Kafka docker container requires you to provide the environment variable `KAFKA_ADVERTISED_HOST_NAME`. This has to be changed in the docker-compose files to match your docker host IP. You can use `127.0.0.1` however this will disallow you to run Kafka in a cluster mode. + +For further details please refer to https://hub.docker.com/r/wurstmeister/kafka + +Running docker build outside of Maven +===================================== + +If you want to have the build of the jars separated from the docker build you need to provide certain VARS to docker. 
+The following list shows all the vars and their intended value if you run docker build from the root dir + +  + - BUILD_DIR_ACS = Core/AtContextServer +  + - BUILD_DIR_SCS = SpringCloudModules/config-server +  + - BUILD_DIR_SES = SpringCloudModules/eureka +  + - BUILD_DIR_SGW = SpringCloudModules/gateway +  + - BUILD_DIR_HMG = History/HistoryManager +  + - BUILD_DIR_QMG = Core/QueryManager +  + - BUILD_DIR_RMG = Registry/RegistryManager +  + - BUILD_DIR_EMG = Core/EntityManager +  + - BUILD_DIR_STRMG = Storage/StorageManager +  + - BUILD_DIR_SUBMG = Core/SubscriptionManager + + - JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar +  + - JAR_FILE_BUILD_SCS = config-server-${project.version}.jar +  + - JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar +  + - JAR_FILE_BUILD_SGW = gateway-${project.version}.jar +  + - JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar +  + - JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar +  + - JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar +  + - JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar +  + - JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar +  + - JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar + + - JAR_FILE_RUN_ACS = AtContextServer.jar +  + - JAR_FILE_RUN_SCS = config-server.jar +  + - JAR_FILE_RUN_SES = eureka-server.jar +  + - JAR_FILE_RUN_SGW = gateway.jar +  + - JAR_FILE_RUN_HMG = HistoryManager.jar +  + - JAR_FILE_RUN_QMG = QueryManager.jar +  + - JAR_FILE_RUN_RMG = RegistryManager.jar +  + - JAR_FILE_RUN_EMG = EntityManager.jar +  + - JAR_FILE_RUN_STRMG = StorageManager.jar +  + - JAR_FILE_RUN_SUBMG = SubscriptionManager.jar diff --git a/scorpio-broker/docs/en/source/errorHandling.rst b/scorpio-broker/docs/en/source/errorHandling.rst new file mode 100644 index 0000000000000000000000000000000000000000..48677bad2b093b3b07e3150a4b1a1a24f6baf551 --- /dev/null +++ b/scorpio-broker/docs/en/source/errorHandling.rst @@ -0,0 +1,88 @@ +************************* +Error Handling in Scorpio +************************* + +This section will provide info on the error handling mechanism for the Scorpio Broker system. + +Listed below are the events of the system + +.. list-table:: **Error Handling** + :widths: 5 15 35 15 10 20 + :header-rows: 1 + + * - S.No. + - Operation/Event + - Scenario Description + - Responsible  Module + - Error Code/ Response + - Action +     + * - 1. + - InvalidRequest + - The request associated to the operation is syntactically invalid or includes wrong content + - REST Controller + - HTTP 400 + - Log the error & notify the requestor + + * - 2. + - BadRequestData + - The request includes input data which does not meet the requirements of the operation + - REST Controller + - HTTP 400 + - Log the error & notify the requestor + + * - 3. + - AlreadyExists + - The referred element already exists + - REST Controller + - HTTP 409 + - Log the error & notify the requestor + + * - 4. + - OperationNotSupported + - The operation is not supported + - REST Controller + - HTTP 422 + - Log the error & notify the requestor + + * - 5. + - ResourceNotFound + - The referred resource has not been found + - REST Controller + - HTTP 404 + - Log the error & notify the requestor + + * - 6. + - InternalError + - There has been an error during the operation execution + - REST Controller + - HTTP 500 + - Log the error & notify the requestor + + * - 7. 
+ - Method Not Allowed + - There has been an error when a client invokes a wrong HTTP verb over a resource + - REST Controller + - HTTP 405 + - Log the error & notify the requestor + + + + +Please note the errors can also be categorized into following categories for different exceptions that can occur internally to the implementation logic as well: + + 1. Low criticality is those which involve the errors that should be handled by the software logic, and are due to some configuration issues and should not require anything like reset, a reboot of the system. + + 2. Medium Criticality is those which will be tried for the software logic handling but it may need system reset, chip reset and may interrupt system significantly. + + 3. High Criticality is the hardware-based error that should not occur and if occur may need system reset. + +Fail-safe mechanisms for the different category of errors: + + a. For the Low criticality of the errors, logging will be performed, the retry will be performed and error will be handled by means of rollback and sending failure to the upper layers. + + b.For the High Criticality errors, emergency errors will be logged further recommending a reboot. + + c.For the Medium criticality errors logging, retry mechanisms will be implemented further logging emergency logs to the system and recommend a reboot to the administrator. + +During the initialization, failure will be logged as emergency and error will be returned to the calling program \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/figures/MQTT.jpg b/scorpio-broker/docs/en/source/figures/MQTT.jpg new file mode 100644 index 0000000000000000000000000000000000000000..83b22e2a6253b52baf85fbae40ff5e7fab1b5049 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/MQTT.jpg differ diff --git a/scorpio-broker/docs/en/source/figures/architecture.png b/scorpio-broker/docs/en/source/figures/architecture.png new file mode 100644 index 0000000000000000000000000000000000000000..bb4b35451896b991bf5e5df3be4124593701c614 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/architecture.png differ diff --git a/scorpio-broker/docs/en/source/figures/dbconfig-1.png b/scorpio-broker/docs/en/source/figures/dbconfig-1.png new file mode 100644 index 0000000000000000000000000000000000000000..fd2d0cd9fef30e8a27a84b580b21b2af2ef7ecc4 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/dbconfig-1.png differ diff --git a/scorpio-broker/docs/en/source/figures/dbconfig-2.png b/scorpio-broker/docs/en/source/figures/dbconfig-2.png new file mode 100644 index 0000000000000000000000000000000000000000..e8d2adcea876cf6f446d5bd65ed477921271f54b Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/dbconfig-2.png differ diff --git a/scorpio-broker/docs/en/source/figures/dbconfig-3.png b/scorpio-broker/docs/en/source/figures/dbconfig-3.png new file mode 100644 index 0000000000000000000000000000000000000000..3da08dc052b12cf5944e111f1b79aa2602ae61ce Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/dbconfig-3.png differ diff --git a/scorpio-broker/docs/en/source/figures/dbconfig-4.png b/scorpio-broker/docs/en/source/figures/dbconfig-4.png new file mode 100644 index 0000000000000000000000000000000000000000..eaa088a573233daca0e1273380d01f56898f27cf Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/dbconfig-4.png differ diff --git a/scorpio-broker/docs/en/source/figures/dbconfig-5.png b/scorpio-broker/docs/en/source/figures/dbconfig-5.png new file 
mode 100644 index 0000000000000000000000000000000000000000..8b773414c809226c00e4518631b915fcd8d8d308 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/dbconfig-5.png differ diff --git a/scorpio-broker/docs/en/source/figures/deploymentarchitecture.png b/scorpio-broker/docs/en/source/figures/deploymentarchitecture.png new file mode 100644 index 0000000000000000000000000000000000000000..a4d134bce267ab342e9d7cdafc480c626c30c6db Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/deploymentarchitecture.png differ diff --git a/scorpio-broker/docs/en/source/figures/flow-1.png b/scorpio-broker/docs/en/source/figures/flow-1.png new file mode 100644 index 0000000000000000000000000000000000000000..ae224c46decfd7f08ba2aaed12373918e09dfaea Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/flow-1.png differ diff --git a/scorpio-broker/docs/en/source/figures/flow-2.png b/scorpio-broker/docs/en/source/figures/flow-2.png new file mode 100644 index 0000000000000000000000000000000000000000..c2c2045803a71c2ef798587d9bd072ce379df168 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/flow-2.png differ diff --git a/scorpio-broker/docs/en/source/figures/flow-3.png b/scorpio-broker/docs/en/source/figures/flow-3.png new file mode 100644 index 0000000000000000000000000000000000000000..d3de357e17884f707236996b2d1129388cf090c0 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/flow-3.png differ diff --git a/scorpio-broker/docs/en/source/figures/flow-4.png b/scorpio-broker/docs/en/source/figures/flow-4.png new file mode 100644 index 0000000000000000000000000000000000000000..adb6d1650d835dd81ea7704b091067991668cebb Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/flow-4.png differ diff --git a/scorpio-broker/docs/en/source/figures/flow-5.png b/scorpio-broker/docs/en/source/figures/flow-5.png new file mode 100644 index 0000000000000000000000000000000000000000..b6052b8529015e069d27f4b8806783a016f522f0 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/flow-5.png differ diff --git a/scorpio-broker/docs/en/source/figures/flow-6.png b/scorpio-broker/docs/en/source/figures/flow-6.png new file mode 100644 index 0000000000000000000000000000000000000000..d27d4af0c76e381a49deaf588b54d14a84386135 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/flow-6.png differ diff --git a/scorpio-broker/docs/en/source/figures/gitGuideline.jpg b/scorpio-broker/docs/en/source/figures/gitGuideline.jpg new file mode 100644 index 0000000000000000000000000000000000000000..bea1d4aaf0c95d2f070315fddf56ea4eec54df7a Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/gitGuideline.jpg differ diff --git a/scorpio-broker/docs/en/source/figures/javaTerminal.png b/scorpio-broker/docs/en/source/figures/javaTerminal.png new file mode 100644 index 0000000000000000000000000000000000000000..25946a2d6d0920c55582866118967290913cd4c5 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/javaTerminal.png differ diff --git a/scorpio-broker/docs/en/source/figures/jdk-1.png b/scorpio-broker/docs/en/source/figures/jdk-1.png new file mode 100644 index 0000000000000000000000000000000000000000..eae1efee4b4f629b9ad83daa9e7c8b20fe417609 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/jdk-1.png differ diff --git a/scorpio-broker/docs/en/source/figures/jdk-3.png b/scorpio-broker/docs/en/source/figures/jdk-3.png new file mode 100644 index 
0000000000000000000000000000000000000000..f4d8b08b05636fcaec9d376712808e0ab83cf5a5 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/jdk-3.png differ diff --git a/scorpio-broker/docs/en/source/figures/jdk-4.png b/scorpio-broker/docs/en/source/figures/jdk-4.png new file mode 100644 index 0000000000000000000000000000000000000000..f63a21afd5d95f0f03d094a4289682c5f2ed3a4b Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/jdk-4.png differ diff --git a/scorpio-broker/docs/en/source/figures/multivalueDiagram.png b/scorpio-broker/docs/en/source/figures/multivalueDiagram.png new file mode 100644 index 0000000000000000000000000000000000000000..51aedaa6224277bd16245df5ae68c42400b9b872 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/multivalueDiagram.png differ diff --git a/scorpio-broker/docs/en/source/figures/postgresTerminal.png b/scorpio-broker/docs/en/source/figures/postgresTerminal.png new file mode 100644 index 0000000000000000000000000000000000000000..37ea62ecd6ca1a993ffbc49c91466fb6479d6b43 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/postgresTerminal.png differ diff --git a/scorpio-broker/docs/en/source/figures/security.png b/scorpio-broker/docs/en/source/figures/security.png new file mode 100644 index 0000000000000000000000000000000000000000..7f5d3d1197312dc1a1342a1478c20f76cc9f9c56 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/security.png differ diff --git a/scorpio-broker/docs/en/source/figures/tutorialArchitecture.png b/scorpio-broker/docs/en/source/figures/tutorialArchitecture.png new file mode 100644 index 0000000000000000000000000000000000000000..7f127abc66f22cdc84844eb5faca74a7b656ee7b Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/tutorialArchitecture.png differ diff --git a/scorpio-broker/docs/en/source/figures/useCaseDiagram.png b/scorpio-broker/docs/en/source/figures/useCaseDiagram.png new file mode 100644 index 0000000000000000000000000000000000000000..d65c61ff16d0adfd5e93acc037a590ec0caf5580 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/useCaseDiagram.png differ diff --git a/scorpio-broker/docs/en/source/figures/zookee.png b/scorpio-broker/docs/en/source/figures/zookee.png new file mode 100644 index 0000000000000000000000000000000000000000..b2e43178d2dc0c42f35187c445eeb2aed9505557 Binary files /dev/null and b/scorpio-broker/docs/en/source/figures/zookee.png differ diff --git a/scorpio-broker/docs/en/source/hardwareRequirement.rst b/scorpio-broker/docs/en/source/hardwareRequirement.rst new file mode 100644 index 0000000000000000000000000000000000000000..f121ea08cdbe0fddedd34fbcbd4be2ae7c0e56bc --- /dev/null +++ b/scorpio-broker/docs/en/source/hardwareRequirement.rst @@ -0,0 +1,60 @@ +***************************************** +System Requirements +***************************************** + +Java 8 System Requirements +########################## + +**Windows** + +- Windows 10 (8u51 and above) +- Windows 8.x (Desktop) +- Windows 7 SP1 +- Windows Vista SP2 +- Windows Server 2008 R2 SP1 (64-bit) +- Windows Server 2012 and 2012 R2 (64-bit) +- RAM: 128 MB +- Disk space: 124 MB for JRE; 2 MB for Java Update +- Processor: Minimum Pentium 2 266 MHz processor +- Browsers: Internet Explorer 9 and above, Firefox + +**Mac OS X** + +- Intel-based Mac running Mac OS X 10.8.3+, 10.9+ +- Administrator privileges for installation +- 64-bit browser +- A 64-bit browser (Safari, for example) is required to run Oracle Java on Mac. 
+ +**Linux** + +- Oracle Linux 5.5+1 +- Oracle Linux 6.x (32-bit), 6.x (64-bit)2 +- Oracle Linux 7.x (64-bit)2 (8u20 and above) +- Red Hat Enterprise Linux 5.5+1, 6.x (32-bit), 6.x (64-bit)2 +- Red Hat Enterprise Linux 7.x (64-bit)2 (8u20 and above) +- Suse Linux Enterprise Server 10 SP2+, 11.x +- Suse Linux Enterprise Server 12.x (64-bit)2 (8u31 and above) +- Ubuntu Linux 12.04 LTS, 13.x +- Ubuntu Linux 14.x (8u25 and above) +- Ubuntu Linux 15.04 (8u45 and above) +- Ubuntu Linux 15.10 (8u65 and above) +- Browsers: Firefox + + +ZooKeeper Requirements +###################### + +ZooKeeper runs in Java, release 1.6 or greater (JDK 6 or greater). +It runs as an ensemble of ZooKeeper servers. +Three ZooKeeper servers are the minimum recommended size for an ensemble, and we also recommend that they run on separate machines. +At Yahoo!, ZooKeeper is usually deployed on dedicated RHEL boxes, with dual-core processors, 2GB of RAM, and 80GB IDE hard drives. + + +Recommendations for Kafka +######################### + +**Kafka brokers** use both the JVM heap and the OS page cache. The JVM heap is used for the replication of partitions between brokers and for log compaction. Replication requires 1MB (default replica.max.fetch.size) for each partition on the broker. In Apache Kafka 0.10.1 (Confluent Platform 3.1), we added a new configuration (replica.fetch.response.max.bytes) that limits the total RAM used for replication to 10MB, to avoid memory and garbage collection issues when the number of partitions on a broker is high. For log compaction, calculating the required memory is more complicated and we recommend referring to the Kafka documentation if you are using this feature. For small to medium-sized deployments, 4GB heap size is usually sufficient. In addition, it is highly recommended that consumers always read from memory, i.e. from data that was written to Kafka and is still stored in the OS page cache. The amount of memory this requires depends on the rate at which this data is written and how far behind you expect consumers to get. If you write 20GB per hour per broker and you allow brokers to fall 3 hours behind in normal scenario, you will want to reserve 60GB to the OS page cache. In cases where consumers are forced to read from disk, performance will drop significantly + +**Kafka Connect** itself does not use much memory, but some connectors buffer data internally for efficiency. If you run multiple connectors that use buffering, you will want to increase the JVM heap size to 1GB or higher. + +**Consumers** use at least 2MB per consumer and up to 64MB in cases of large responses from brokers (typical for bursty traffic). Producers will have a buffer of 64MB each. Start by allocating 1GB RAM and add 64MB for each producer and 16MB for each consumer planned. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/index.rst b/scorpio-broker/docs/en/source/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..cb40806b9ab4792f9b336297c6fa7db95880efeb --- /dev/null +++ b/scorpio-broker/docs/en/source/index.rst @@ -0,0 +1,60 @@ +===================== +Scorpio Broker +===================== + +The Scorpio Broker implements the NGSI-LD API through which context producers and consumers can interact with each other. For Example in the typical IoT based room, various sensors like temperature sensors, light sensors, etc are connected to the central application which uses those sensors output and acts as the consumer. 
There can be a lot of use cases for this central application i.e Scorpio. + +1. Scorpio uses the NGSI-LD API and information model to model entities with their properties and relationships, thus forming a property graph with the enitites as the nodes. It allows finding information by discovering entities, following relationships and filtering according to properties, relationships and related meta-information. For data not directly represented in NGSI-LD like video streams or 3D models, links can be added to the model that allows consumers to directly access this information. In this way, Scorpio can provide a graph-based index to a data lake. + +2. Scorpio provides several interfaces for querying the stored data so easily analytics can be done on the stored data. like it can be used to predict the situation of an ecosystem. Example:- In a huge building there can be several fire sensors, temperature sensors, and smoke sensors. In case of a false fire alarm, it can be verified by the collected fire data, temperature data and smoke data of the particular area. + +3. Scorpio can be used for determining the accuracy of any event. For example, In an automated car, the speed of the car can be known by several applications like GPS, speed camera and speedometer. Scorpio's internal data is stored in this way that any third-party application can use it and can find the accuracy and determine faulty device. + +.. figure:: figures/useCaseDiagram.png + +.. toctree:: + :maxdepth: 1 + :caption: Introduction + :numbered: + + introduction.rst + +.. toctree:: + :maxdepth: 1 + :caption: Beginner Guide + :numbered: + + onepageTutorial.rst + buildScorpio.rst + mqtt.rst + +.. toctree:: + :maxdepth: 1 + :caption: FIWARE NGSI-LD API Walkthrough + :numbered: + + API_walkthrough.rst + +.. toctree:: + :maxdepth: 1 + :caption: Developer Guide + :numbered: + + installationGuide.rst + hardwareRequirement.rst + errorHandling.rst + security.rst + HelloWorld.rst + multivalue.rst + +.. toctree:: + :maxdepth: 1 + :caption: Advanced User Guide + :numbered: + + systemOverview.rst + callFlow.rst + contributionGuideline.rst + docker.rst + config.rst + troubleshooting.rst diff --git a/scorpio-broker/docs/en/source/installationGuide.rst b/scorpio-broker/docs/en/source/installationGuide.rst new file mode 100644 index 0000000000000000000000000000000000000000..8fab577fbb5bd4312d6dd1a0d00465b9a01e9b36 --- /dev/null +++ b/scorpio-broker/docs/en/source/installationGuide.rst @@ -0,0 +1,269 @@ +**************************** +Developer Installation Guide +**************************** + +In order to set-up the environment of Scorpio broker, the following dependency needs to be configured:- + +1. Eclipse. +2. Server JDK. +3. Apache Kafka. +4. PostgreSQL + + +Windows +####### + +Eclipse installation +******************** + +- **Download the Eclipse Installer.**: + + Download Eclipse Installer from http://www.eclipse.org/downloads.Eclipse is hosted on many mirrors around the world. Please select the one closest to you and start to download the Installer. + +- **Start the Eclipse Installer executable**: + + For Windows users, after the Eclipse Installer, the executable has finished downloading it should be available in your download directory. Start the Eclipse Installer executable. You may get a security warning to run this file. If the Eclipse Foundation is the Publisher, you are good to select Run. + + For Mac and Linux users, you will still need to unzip the download to create the Installer. Start the Installer once it is available. 
+ +- **Select the package to install**: + + The new Eclipse Installer shows the packages available to Eclipse users. You can search for the package you want to install or scroll through the list. Select and click on the package you want to install. + +- **Select your installation folder** + + Specify the folder where you want Eclipse to be installed. The default folder will be in your User directory. Select the ‘Install’ button to begin the installation. + +- **Launch Eclipse** + + Once the installation is complete you can now launch Eclipse. The Eclipse Installer has done its work. Happy coding. + + +JDK Setup +********* + +- Start the JDK installation and hit the “Change destination folder†checkbox, then click 'Install.' + +**Note:-** Recommended version is JDK-11. Scorpio Broker is developed and tested with this version only. + +.. figure:: figures/jdk-1.png + +- Change the installation directory to any path without spaces in the folder name. + + +After you've installed Java in Windows, you must set the  JAVA_HOME  environment variable to point to the Java installation directory. + +**Set the JAVA_HOME Variable** + +To set the JAVA_HOME variable: + +1. Find out where Java is installed. If you didn't change the path during installation, it will be something like this: + + *C:\Program Files\Java\jdk1.version_detail* + +2. - In Windows 8/10 go to **Control Panel** > **System** > **Advanced System Settings**. + + OR + + - In Windows 7 right-click **My Computer** and select **Properties** > **Advanced**. + +3. Click the Environment Variables button. + +4. Under System Variables, click New. + +5. In the User Variable Name field, enter: **JAVA_HOME** + +6. In the User Variable Value field, enter your JDK path. + + (Java path and version may change according to the version of Kafka you are using) + +7. Now click OK. + +8. Search for a Path variable in the “System Variable†section in the “Environment Variables†dialogue box you just opened. + +9. Edit the path and type *;%JAVA_HOME%\bin* at the end of the text already written there, just like the image below: + +.. figure:: figures/jdk-3.png + + +- To confirm the Java installation, just open cmd and type “java –version.â€Â You should be able to see the version of Java you just installed. + +.. figure:: figures/jdk-4.png + +If your command prompt somewhat looks like the image above, you are good to go. Otherwise, you need to recheck whether your setup version matches the correct OS architecture (x86, x64), or if the environment variables path is correct. + + +Setting Up Kafka +**************** + +1. Go to your Kafka config directory. For example:- **C:\kafka_2.11-0.9.0.0\config** +2. Edit the file “server.properties.†+3. Find and edit the line log.dirs=/tmp/kafka-logs†to “log.dir= C:\kafka_2.11-0.9.0.0\kafka-logs. +4. If your ZooKeeper is running on some other machine or cluster you can edit “zookeeper.connect:2181â€Â to your custom IP and port. For this demo, we are using the same machine so there's no need to change. Also the Kafka port and broker.id are configurable in this file. Leave other settings as is. +5. Your Kafka will run on default port 9092 and connect to ZooKeeper’s default port, 2181. + +**Note**: For running Kafka, zookeepers should run first. At the time of closing Kafka, zookeeper should be closed first than Kafka. Recommended version of kafka is kafka_2.12-2.1.0. + + +Running a Kafka Server +********************** + +Important: Please ensure that your ZooKeeper instance is up and running before starting a Kafka server. + +1. 
Go to your Kafka installation directory:** C:\kafka_2.11-0.9.0.0\** +2. Open a command prompt here by pressing Shift + right-click and choose the “Open command window here†option). +3. Now type **.\bin\windows\kafka-server-start.bat .\config\server.properties** and press Enter,then +4. Type **.\bin\windows\kafka-server-start.bat .\config\server.properties** in new command window and hit enter. + + +Setting up PostgreSQL +********************* + +Step 1) Go to https://www.postgresql.org/download. + + +Step 2) You are given two options:- + + 1. Interactive Installer by EnterpriseDB + 2. Graphical Installer by BigSQL + +BigSQL currently installs pgAdmin version 3 which is deprecated. It's best to choose EnterpriseDB which installs the latest version 4 + + +Step 3) + + 1. You will be prompted to the desired Postgre version and operating system. Select the Postgres 10, as Scorpio has been tested and developed with this version. + + 2. Click the Download Button, Download will begin + +Step 4) Open the downloaded .exe and Click next on the install welcome screen. + + +Step 5) + + 1. Change the Installation directory if required, else leave it to default + + 2.Click Next + + +Step 6) + + 1. You can choose the components you want to install in your system. You may uncheck Stack Builder + + 2. Click on Next + + +Step 7) + + 1. You can change the data location + + 2.Click Next + + +Step 8) + + 1. Enter the superuser password. Make a note of it + + 2.Click Next + + +Step 9) + + 1. Leave the port number as the default + + 2.Click Next + + +Step 10) + + 1. Check the pre-installation summary. + + 2.Click Next + +Step 11) Click the next button + +Step 12) Once install is complete you will see the Stack Builder prompt + + 1. Uncheck that option. We will use Stack Builder in more advance tutorials + + 2.Click Finish + +Step 13) To launch Postgre go to Start Menu and search pgAdmin 4 + +Step 14) You will see pgAdmin homepage + +Step 15) Click on Servers > Postgre SQL 10 in the left tree + +.. figure:: figures/dbconfig-1.png + +Step 16) + + 1.Enter superuser password set during installation + + 2. Click OK + +Step 17) You will see the Dashboard + +.. figure:: figures/dbconfig-2.png + +That's it to Postgre SQL installation. + +Linux +##### +JDK Setup +********* +To create a Java environment in your machine install the JDK, for this open the terminal, and run the following commands:- + +1. sudo apt-get update + +2. sudo apt-get install openjdk-8-jdk + +To check that JDK is properly installed in your machine, run the command **java -version** in your terminal if it returns the version of the JDK as 11 then it's working fine. + +.. figure:: figures/javaTerminal + +Eclipse installation +******************** + +To install the eclipse in your linux machine first, visit the link https://www.eclipse.org/downloads/ and select the version of eclipse based on the flavor of your linux machine. + +Setting Up Kafka +**************** + +To download the Apache Kafka in your machine run the following commands one by one in your terminal. + +1. mkdir kafka +2. cd kafka +3. wget https://archive.apache.org/dist/kafka/2.2.0/kafka_2.12-2.2.0.tgz +4. tar -xzf kafka_2.12-2.2.0.tgz + +Once the Kafka is downloaded in your machine hit the following commands to get it run + +1. kafka_2.12-2.2.0/bin/zookeeper-server-start.sh kafka_2.12-2.2.0/config/zookeeper.properties > /dev/null 2>&1 & +2. 
kafka_2.12-2.2.0/bin/kafka-server-start.sh kafka_2.12-2.2.0/config/server.properties > /dev/null 2>&1 &

Setting up PostgreSQL
*********************

In order to download PostgreSQL on your machine, run the following commands from your terminal:

1. sudo apt update
2. sudo apt-get install postgresql-10
3. service postgresql status

The last command gives you the status of PostgreSQL on your machine; if it matches the one in the picture, everything is properly installed, else re-run the commands.

.. figure:: figures/postgresTerminal.png

Once PostgreSQL is successfully installed on your machine, create the database **ngb** and change its role by running the following commands:

1. psql -U postgres -c "create database ngb;"
2. psql -U postgres -c "create user ngb with password 'ngb';"
3. psql -U postgres -c "alter database ngb owner to ngb;"
4. psql -U postgres -c "grant all privileges on database ngb to ngb;"
5. psql -U postgres -c "alter role ngb superuser;"
6. sudo apt install postgresql-10-postgis-2.4
7. sudo apt install postgresql-10-postgis-scripts
8. sudo -u postgres psql -U postgres -c "create extension postgis;"

After this your PostgreSQL is ready to use for the Scorpio Broker.

diff --git a/scorpio-broker/docs/en/source/introduction.rst b/scorpio-broker/docs/en/source/introduction.rst new file mode 100644 index 0000000000000000000000000000000000000000..aa9bb0a979d1d38f338a6ad56fc884a0b8a68d78 --- /dev/null +++ b/scorpio-broker/docs/en/source/introduction.rst @@ -0,0 +1,29 @@

*******************************
Introduction
*******************************

Scorpio Broker is a reference implementation of the **NGSI-LD standard** specifications that is compliant with the **ETSI standards**. Scorpio Broker is a core component of the **FIWARE/IoT** platform, wherein IoT data driven by dynamic context is collected, processed, notified and stored/ingested for different application usage perspectives.
Scorpio Broker also provides an implementation of REST API endpoints for the various data context operations that conform to the **NGSI-LD API** specification.
Scorpio Broker allows you to collect, process, notify and store IoT data with dynamic context through the use of linked data concepts.
It makes use of a **microservice-based architecture** built with the help of **Spring Boot**, which has its own advantages over existing IoT brokers, such as scalability, cross-technology integration, etc.

Scorpio Broker, being based on NGSI-LD, offers the unique feature of a linked data context that provides a self-contained (or referenced) **dynamic schema definition** (i.e. the context) for the data contained in each message/entity.
This allows the Scorpio Broker core processing to remain unified even when it receives dynamic context-driven data as input from different types of data sources coupled with (or designed for) different schemas.

Key advantages of Scorpio Broker over other brokers:

- Uses a micro-service architecture which enhances the performance drastically.

- The Scorpio Broker architecture is designed and implemented to be scalable, highly available, and load balanced.

- Use of LD (linked data), which gives us the leverage of dynamic context.

- Usage of Kafka, giving us a robust pub-sub service with the ability to scale with no downtime.

- It provides fail-over resiliency.

- It provides load balancing to distribute the load on distributed infrastructure.

- It is modular enough to offer low coupling and high cohesion by design.
+ +- It offers different storage integration without changing the application logic time and again. diff --git a/scorpio-broker/docs/en/source/ld-subscriptions-registrations.md b/scorpio-broker/docs/en/source/ld-subscriptions-registrations.md new file mode 100644 index 0000000000000000000000000000000000000000..a3f38b09a4e18ac375a3bc1a0548883f3e0da71f --- /dev/null +++ b/scorpio-broker/docs/en/source/ld-subscriptions-registrations.md @@ -0,0 +1,622 @@ +[![FIWARE Core Context Management](https://nexus.lab.fiware.org/repository/raw/public/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/core/README.md) +[![NGSI LD](https://img.shields.io/badge/NGSI-LD-d6604d.svg)](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf) +[![JSON](https://img.shields.io/badge/JSON--LD-1.1-f06f38.svg)](https://w3c.github.io/json-ld-syntax/) + +**Description:** This tutorial discusses the usage of subscriptions and registrations within NGSI-LD and highlights the +similarities and differences between the equivalent NGSI-v2 and NGSI-LD operations. + +The tutorial uses [cUrl](https://ec.haxx.se/) commands throughout, but is also available as +[Postman documentation](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/) + +[![Run in Postman](https://run.pstmn.io/button.svg)](https://github.com/ScorpioBroker/ScorpioBroker/blob/feature-80-temp/docs/en/source/Payloads/FIWARE%20Linked%20Data%20Subscriptions%20and%20Registrations.postman_collection.json) + +
# Understanding Linked Data Subscriptions and Registrations

> "Do not repeat after me words that you do not understand. Do not merely put on a mask of my ideas, for it will be an
> illusion and you will thereby deceive yourself."
>
> ― Jiddu Krishnamurti

NGSI-LD Subscriptions and Registrations provide the basic mechanism to allow the components within a Smart Linked Data
Solution to interact with each other.

As a brief reminder, within a distributed system, subscriptions inform a third party component that a change in the
context data has occurred (and the component needs to take further actions), whereas registrations tell the context
broker that additional context information is available from another source.

Both of these operations require that the receiving component fully understands the requests it receives, and is capable
of creating and interpreting the resultant payloads. The differences between the NGSI-v2 and NGSI-LD operations are
small, but there has been a minor amendment to facilitate the incorporation of linked data concepts, and therefore the
contract between the various components has changed to include minor updates.
## Entities within a stock management system

The relationship between our Linked Data entities is defined as shown below. In addition to the existing data, the
`tweets` attribute will be supplied by a _Context Provider_. In all other respects this model remains the same as in the
[previous tutorial](working-with-linked-data.md):

![](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/entities.png)

## Stock Management frontend

The simple Node.js Express application has been updated to use NGSI-LD in the previous
[tutorial](working-with-linked-data.md). We will use the monitor page to watch the status of recent requests, and two
store pages to buy products. Once the services are running these pages can be accessed from the following URLs:

### Event Monitor

+ +The event monitor can be found at: `http://localhost:3000/app/monitor` + +![FIWARE Monitor](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/monitor.png) + +

### Store 001

+ +Store001 can be found at: `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001` + +![Store](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/store.png) + +

### Store 002

Store002 can be found at: `http://localhost:3000/app/store/urn:ngsi-ld:Building:store002`

![Store2](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/store2.png)

# Architecture

The demo Supermarket application will send and receive NGSI-LD calls to a compliant context broker. Since the NGSI-LD
interface is available on the
[Scorpio Broker](https://scorpio.readthedocs.io/en/latest/), the demo application will make use of this
FIWARE component.

Currently, the Scorpio Broker relies on PostgreSQL to keep
persistence of the context data it holds. To request context data from external sources, a simple Context Provider NGSI
proxy has also been added. To visualize and interact with the Context we will add a simple Express application.

Therefore, the architecture will consist of three elements:

- The [Scorpio Broker](https://scorpio.readthedocs.io/en/latest/) which will receive requests using
  [NGSI-LD](https://forge.etsi.org/swagger/ui/?url=https://forge.etsi.org/gitlab/NGSI-LD/NGSI-LD/raw/master/spec/updated/full_api.json)
- The underlying PostgreSQL database:
    - Used by the Scorpio Broker to hold context data information such as data entities, subscriptions and
      registrations
- The **Stock Management Frontend** which will:
    - Display store information
    - Show which products can be bought at each store
    - Allow users to "buy" products and reduce the stock count.

Since all interactions between the elements are initiated by HTTP requests, the entities can be containerized and run
from exposed ports.

![](figures/tutorialArchitecture.png)

# Interactions between Components

## Using Subscriptions with NGSI-LD

Go to `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001` to display and interact with the Supermarket data.

### Create a Subscription (Store 1) - Low Stock

NGSI-LD subscriptions can be set up using the `/ngsi-ld/v1/subscriptions/` endpoint in a similar manner to the
NGSI-v2 `/v2/subscriptions` endpoint. The payload body is slightly different however. Firstly the linked data `@context`
must be present either as an attribute or in the `Link` header. If the `@context` is placed in the body, the
`Content-Type` header must state that the payload is `application/ld+json` - i.e. Linked Data plus JSON. The supplied
`@context` will also be used when making notifications as part of the notification request.

The `type` of the NGSI-LD subscription request is always `type=Subscription`. The structure of the subscription has
changed. When setting up a subscription, there is no longer a separate `subject` section to the payload; entities to
watch and trigger conditions are now set at the same level as the `description` of the subscription.

- `condition.attrs` has been moved up a level and renamed to `watchedAttributes`
- `condition.expression` has been moved up a level and renamed to `q`

The `notification` section of the body states that once the conditions of the subscription have been met, a POST request
containing all affected Shelf entities will be sent to the URL `http://tutorial:3000/subscription/low-stock-store001`.
It is now possible to amend the notification payload by requesting `notification.format=keyValues` and to remove the
`@context` from the notification body by stating `notification.endpoint.accept=application/json`. The `@context` is not
lost, it is merely passed as a `Link` header.
In summary, all of the flags within a subscription work in the same manner +as a GET request to the context broker itself. If no flags are set, a full NGSI-LD response including the `@context` is +returned by default, and the payload can be reduced and amended by adding in further restrictions. + +#### 1 Request: + +```bash +curl -L -X POST 'http://localhost:9090/ngsi-ld/v1/subscriptions/' \ +-H 'Content-Type: application/ld+json' \ +--data-raw '{ + "description": "Notify me of low stock in Store 001", + "type": "Subscription", + "entities": [{"type": "Shelf"}], + "watchedAttributes": ["numberOfItems"], + "q": "numberOfItems<10;locatedIn==urn:ngsi-ld:Building:store001", + "notification": { + "attributes": ["numberOfItems", "stocks", "locatedIn"], + "format": "keyValues", + "endpoint": { + "uri": "http://tutorial:3000/subscription/low-stock-store001", + "accept": "application/json" + } + }, + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld" +}' +``` + +### Create a Subscription (Store 2) - Low Stock + +This second request fires notifications to a different endpoint (URL +`http://tutorial:3000/subscription/low-stock-store002`.) The `notification.format=normalized` and +`notification.endpoint.accept=application/ld+json` will ensure that the `@context` is passed in the body of the +notification request and that the payload will consist of the expanded entities. + +#### 2 Request: + +```bash +curl -L -X POST 'http://localhost:9090/ngsi-ld/v1/subscriptions/' \ +-H 'Content-Type: application/json' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +--data-raw '{ + "description": "LD Notify me of low stock in Store 002", + "type": "Subscription", + "entities": [{"type": "Shelf"}], + "watchedAttributes": ["numberOfItems"], + "q": "numberOfItems<10;locatedIn==urn:ngsi-ld:Building:store002", + "notification": { + "attributes": ["numberOfItems", "stocks", "locatedIn"], + "format": "normalized", + "endpoint": { + "uri": "http://tutorial:3000/subscription/low-stock-store002", + "accept": "application/ld+json" + } + } +}' +``` + +### Read Subscription Details + +Subscription details can be read by making a GET request to the `/ngsi-ld/v1/subscriptions/`. All subscription CRUD +actions continue to be mapped to the same HTTP verbs as before. Adding the `Accept: application/json` will remove the +`@context` element from the response body. + +#### 3 Request: + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/subscriptions/' +``` + +#### Response: + +The response consists of the details of the subscriptions within the system. The parameters within the `q` attribute +have been expanded to use the full URIs, as internally the broker consistently uses long names. The differences between +the payloads offered by the two subscriptions will be discussed below. 
+ +```json +[ + { + "id": "urn:ngsi-ld:Subscription:5e62405ee232da3a07b5fa7f", + "type": "Subscription", + "description": "Notify me of low stock in Store 001", + "entities": [ + { + "type": "Shelf" + } + ], + "watchedAttributes": ["numberOfItems"], + "q": "https://fiware.github.io/tutorials.Step-by-Step/schema/numberOfItems<10;https://fiware.github.io/tutorials.Step-by-Step/schema/locatedIn==urn:ngsi-ld:Building:store001", + "notification": { + "attributes": ["numberOfItems", "stocks", "locatedIn"], + "format": "keyValues", + "endpoint": { + "uri": "http://tutorial:3000/subscription/low-stock-store001", + "accept": "application/json" + } + }, + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld" + }, + { + "id": "urn:ngsi-ld:Subscription:5e624063e232da3a07b5fa80", + "type": "Subscription", + "description": "Notify me of low stock in Store 002", + "entities": [ + { + "type": "Shelf" + } + ], + "watchedAttributes": ["numberOfItems"], + "q": "https://fiware.github.io/tutorials.Step-by-Step/schema/numberOfItems<10;https://fiware.github.io/tutorials.Step-by-Step/schema/locatedIn==urn:ngsi-ld:Building:store002", + "notification": { + "attributes": ["numberOfItems", "stocks", "locatedIn"], + "format": "keyValues", + "endpoint": { + "uri": "http://tutorial:3000/subscription/low-stock-store002", + "accept": "application/json" + } + }, + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld" + } +] +``` + +### Retrieving Subscription Events + +Open two tabs on a browser. Go to the event monitor (`http://localhost:3000/app/monitor`) to see the payloads that are +received when a subscription fires, and then go to store001 +(`http://localhost:3000/app/store/urn:ngsi-ld:Building:store001`) and buy beer until less than 10 items are in stock. +The low stock message should be displayed on screen. + +![low-stock](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/low-stock-warehouse.png) + +`low-stock-store001` is fired when the Products on the shelves within Store001 are getting low, the subscription payload +can be seen below: + +![low-stock-json](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/low-stock-monitor.png) + +The data within the payload consists of key-value pairs of the attributes which were specified in the request. This is +because the subscription was created using the `format=keyValues` attribute. The `@context` is not present in the +payload body since `endpoint.accept=application/json` was set. The effect is to return a `data` array in a very similar +format to the `v2/subscription/` payload. In addition to the `data` array, the `subscriptionId` is included in the +response, along with a `notifiedAt` element which describes when the notification was fired. + +Now go to store002 (`http://localhost:3000/app/store/urn:ngsi-ld:Building:store002`) and buy beer until fewer than 10 +items are in stock. The low stock message is once again displayed on screen, the payload can be seen within the event +monitor. + +![low-stock-ld](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/low-stock-monitor-ld.png) + +The second subscription has been set up to pass the full normalized NGSI-LD payload along with the `@context`. This has +been achieved by using the using the `format=normalized` attribute within the subscription itself, as well as setting +`endpoint.accept=application/ld+json`, so that the `@context` is also passed with each entity. 
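As noted above, all subscription CRUD actions map onto the standard HTTP verbs, so a subscription that is no longer needed can be removed with a DELETE request. The sketch below is illustrative only and assumes the subscription `id` returned in the earlier response (`urn:ngsi-ld:Subscription:5e62405ee232da3a07b5fa7f`); substitute the `id` reported by your own broker.

```bash
# Illustrative clean-up: delete a subscription by id (use the id returned by your broker)
curl -L -X DELETE 'http://localhost:9090/ngsi-ld/v1/subscriptions/urn:ngsi-ld:Subscription:5e62405ee232da3a07b5fa7f'
```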
+ +## Using Registrations with NGSI-LD + +Context Registrations allow some (or all) data within an entity to be provided by an external context provider. It could +be another full context-provider a separate micro-service which only responds to a subset of the NGSI-LD endpoints. +However, there needs to be a contract created as to who supplies what. + +All registrations can be subdivided into one of two types. Simple registrations where a single context provider is +responsible for the maintenance of the whole entity, and partial registrations where attributes are spread across +multiple context providers. For a simple registration, all context requests are forwarded + +| Request | Action at **Context Broker** | Action at **Context Provider** | +| ---------- | --------------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------- | +| **GET** | Pass request to **Context Provider**, proxy the response back unaltered. | Respond to context broker with the result of the GET request based on the entities held internally | +| **PATCH** | Pass request to **Context Provider**, proxy back the HTTP back status code. | Update the entity within the **Context Provider**, Respond to the context broker with a status code | +| **DELETE** | Pass request to **Context Provider** | Delete the entity within the **Context Provider**, Respond to the context broker with a status code | + +Effectively every simple registration is saying _"this entity is held elsewhere"_, but the entity data can be requested +and modified via requests to this context broker. + +For partial registrations the situation is more complex + +| Request | Action at **Context Broker** | Action at **Context Provider** | +| ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------ | +| **GET** | Assuming an entity exists locally, pass request for additional proxied attributes to **Context Provider**, concatenate a response back for locally held attributes and additional information from the **Context Provider** | Respond to context broker with the result of the GET request based on the entities held internally | +| **PATCH** | Update any locally held attributes, Pass update requests for additional attributes to **Context Provider**, and return **success** or **partial success** HTTP status code dependent upon the overall result. | Update the requested attributes of the entity held within the **Context Provider**. Respond to the context broker with a status code | +| **DELETE** | If deleting an entity, remove the complete local instance. If deleting locally held attributes remove them. If deleting attributes held in the **Context Provider**, pass request on to **Context Provider** | Delete the entity attributes within the **Context Provider**, Respond to the context broker with a status code | + +Each partial registration is saying _"additional augmented context for this entity is held elsewhere"_. The entity data +can be requested and modified via requests to this context broker. 
+ +With normal operation, the NGSI-LD response does not expose whether data collated from multiple sources is held directly +within the context broker or whether the information has been retrieved externally. It is only when an error occurs +(e.g. timeout) that the HTTP status error code reveals that externally held information could not be retrieved or +amended. + +### Create a Registration + +All NGSI-LD Context Provider Registration actions take place on the `/ngsi-ld/v1/csourceRegistrations/` endpoint. The +standard CRUD mappings apply. The `@context` must be passed either as a `Link` header or within the main body of the +request. + +The body of the request is similar to the NGSI-v2 equivalent with the following modifications: + +- The NGSI-v2 `dataProvided` object is now an array called `information`. +- NGSI-v2 `attrs` have been split into separate arrays of `properties` and `relationships` +- The NGSI-v2 `provider.url` has moved up to `endpoint` + +#### 4 Request: + +```bash +curl -L -X POST 'http://localhost:9090/ngsi-ld/v1/csourceRegistrations/' \ +-H 'Content-Type: application/json' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +--data-raw ' { + "type": "ContextSourceRegistration", + "information": [ + { + "entities": [ + { + "type": "Building", + "id": "urn:ngsi-ld:Building:store001" + } + ], + "properties": [ + "tweets" + ] + } + ], + "endpoint": "http://context-provider:3000/static/tweets" +}' +``` + +### Read Registration Details + +Retrieving the registration details can be made by sending a GET request to the `/ngsi-ld/v1/csourceRegistrations/` +endpoint, along with an appropriate JSON-LD context in the `Link` header. + +#### 5 Request: + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/csourceRegistrations/' \ +-H 'Accept: application/ld+json' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +The response returns the details of the registration. In this case the short names of the `properties` have been +returned, along with the `@context`. + +```json +[ + { + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "id": "urn:ngsi-ld:ContextSourceRegistration:5e6242179c26be5aef9991d4", + "type": "ContextSourceRegistration", + "endpoint": "http://context-provider:3000/static/tweets", + "information": [ + { + "entities": [ + { + "id": "urn:ngsi-ld:Building:store001", + "type": "Building" + } + ], + "properties": ["tweets"] + } + ] + } +] +``` + +### Read from Store 1 + +Once a registration has been set up, the additional registered `properties` and `relationships` are transparently +returned when an requested entity is requested. For simple registrations, a request to obtain the whole entity will be +proxied to the registered `endpoint`, for partial registrations the `properties` and `relationships` are added to the +existing entity held within the context broker. + +#### 6 Request: + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/json' +``` + +#### Response: + +The response now holds an additional `tweets` Property, which returns the values obtained from +`http://context-provider:3000/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001` - i.e. the forwarding +endpoint. 
+ +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "id": "urn:ngsi-ld:Building:store001", + "type": "Building", + "furniture": { + "type": "Relationship", + "object": ["urn:ngsi-ld:Shelf:unit001", "urn:ngsi-ld:Shelf:unit002", "urn:ngsi-ld:Shelf:unit003"] + }, + "address": { + "type": "Property", + "value": { + "streetAddress": "Bornholmer Straße 65", + "addressRegion": "Berlin", + "addressLocality": "Prenzlauer Berg", + "postalCode": "10439" + }, + "verified": { + "type": "Property", + "value": true + } + }, + "name": { + "type": "Property", + "value": "Bösebrücke Einkauf" + }, + "category": { + "type": "Property", + "value": "commercial" + }, + "location": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [13.3986, 52.5547] + } + }, + "tweets": { + "type": "Property", + "value": [ + "It has great practical value – you can wrap it around you for warmth as you bound across the cold moons of Jaglan Beta;", + "You can lie on it on the brilliant marble-sanded beaches of Santraginus V, inhaling the heady sea vapours;", + "You can sleep under it beneath the stars which shine so redly on the desert world of Kakrafoon;", + "Use it to sail a mini raft down the slow heavy river Moth;", + "Wet it for use in hand-to-hand-combat;", + "Wrap it round your head to ward off noxious fumes or to avoid the gaze of the Ravenous Bugblatter Beast of Traal (a mindboggingly stupid animal, it assumes that if you can’t see it, it can’t see you – daft as a bush, but very, very ravenous);", + "You can wave your towel in emergencies as a distress signal, and of course dry yourself off with it if it still seems to be clean enough." + ] + } +} +``` + +The same response data can be seen within the supermarket application itself. In practice this data has been created via +a series of requests - the context broker is responsible for the `urn:ngsi-ld:Building:store001` data, however it checks +to see if any further information can be provided from other sources. In our case the `CSourceRegistration` indicates +that one further attribute _may_ be available. The broker then requests `tweets` information from the context provider, +and provided that it responds in a timely manner, the `tweets` information is added to the resultant payload. + +The supermarket application displays the received data on screen within the supermarket application itself: + +![tweets-1](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/tweets-1.png) + +### Read direct from the Context Provider + +Every context-provider must stand by a fixed contract. At a minimum must be able to respond to varieties of the +`/ngsi-ld/v1/entities/` GET request. If the registration is limited to certain properties, this request will +also contain an `attrs` parameter in the query string. + +Dependent upon the use case of the context-provider, it may or may not need to be able to interpret JSON-LD `@context` - +in this case a request is merely returning the full `tweets` attribute. 
+ +The same request is made by the context broker itself when querying for registered attributes + +#### 7 Request: + +```bash +curl -L -X GET 'http://localhost:3000/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?attrs=tweets' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/ld+json' +``` + +#### Response: + +As can be seen the `@context` has been returned in the request (since the `Content-Type` header was set). The rest of +the response resembles any standard NGSI-LD request. + +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "id": "urn:ngsi-ld:Building:store001", + "type": "Building", + "tweets": { + "type": "Property", + "value": [ + "It has great practical value – you can wrap it around you for warmth as you bound across the cold moons of Jaglan Beta;", + "You can lie on it on the brilliant marble-sanded beaches of Santraginus V, inhaling the heady sea vapours;", + "You can sleep under it beneath the stars which shine so redly on the desert world of Kakrafoon;", + "Use it to sail a mini raft down the slow heavy river Moth;", + "Wet it for use in hand-to-hand-combat;", + "Wrap it round your head to ward off noxious fumes or to avoid the gaze of the Ravenous Bugblatter Beast of Traal (a mindboggingly stupid animal, it assumes that if you can’t see it, it can’t see you – daft as a bush, but very, very ravenous);", + "You can wave your towel in emergencies as a distress signal, and of course dry yourself off with it if it still seems to be clean enough." + ] + } +} +``` + +### Direct update of the Context Provider + +For a read-write interface it is also possible to amend context data by making a PATCH request to the relevant +`ngsi-ld/v1/entities//attrs` endpoint. + +#### 8 Request: + +```bash +curl -L -X PATCH 'http://localhost:3000/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/json' \ +--data-raw '{ + "tweets": { + "type": "Property", + "value": [ + "Space is big.", + "You just won'\''t believe how vastly, hugely, mind-bogglingly big it is.", + "I mean, you may think it'\''s a long way down the road to the chemist'\''s, but that'\''s just peanuts to space." + ] + } +}' +``` + +#### 9 Request: + +If the regisitered attribute is requested from the context broker, it returns the _updated_ values obtained from +`http://context-provider:3000/static/tweets/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001` - i.e. the forwarding +endpoint. + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?attrs=tweets&options=keyValues' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +This alters the response to match the values updated in the previous PATCH request. + +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "id": "urn:ngsi-ld:Building:store001", + "type": "Building", + "tweets": [ + "Space is big.", + "You just won't believe how vastly, hugely, mind-bogglingly big it is.", + "I mean, you may think it's a long way down the road to the chemist's, but that's just peanuts to space." 
+ ] +} +``` + +Since the context provider is responsible for supplying `tweets` information, changes in the context provider will +always be reflected in requests to the context-broker itself. The supermarket application is calling the context broker +for context regardless of origin, so the updated `tweets` data are displayed on screen within the supermarket +application itself: + +![tweets-2](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/tweets-2.png) + +The context broker is therefore able to return a complete holistic picture of the current state of the world. + +### Forwarded Update + +#### 10 Request: + +A PATCH request to the context broker ( either `ngsi-ld/v1/entities//` or +`ngsi-ld/v1/entities//attrs`) will be forwarded to the registered context provider if a registration is +found. It is therefore possible to alter the state of a context-provider as a side effect. Of course, not all context +providers are necessarily read-write, so attempting to change the attributes of forwarded context may not be fully +respected. + +In this case however a request to PATCH `ngsi-ld/v1/entities/` will be successfully forwarded as a series of +`ngsi-ld/v1/entities//attrs` requests for each regsitered attribute that is found in the registration. + +```bash +curl -L -X PATCH 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs/tweets' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/json' \ +--data-raw '{ + "type": "Property", + "value": [ + "This must be Thursday", + "I never could get the hang of Thursdays." + ] +} ' +``` + +#### 11 Request: + +The result of the previous operation can be seen by retrieving the whole entity using a GET request. + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001?attrs=tweets&options=keyValues' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/json' +``` + +#### Response: + +This alters the response to match the values updated in the previous PATCH request which was sent to the context broker +and then forwarded to the context provider endpoint. + +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "id": "urn:ngsi-ld:Building:store001", + "type": "Building", + "tweets": ["This must be Thursday", "I never could get the hang of Thursdays."] +} +``` + +As can be seen, the updated `tweets` data is also displayed within the supermarket application itself: + +![tweets-3](https://fiware.github.io/tutorials.LD-Subscriptions-Registrations/img/tweets-3.png) diff --git a/scorpio-broker/docs/en/source/linked-data.md b/scorpio-broker/docs/en/source/linked-data.md new file mode 100644 index 0000000000000000000000000000000000000000..1d310353890e0780d95b99628ceb724e73f73966 --- /dev/null +++ b/scorpio-broker/docs/en/source/linked-data.md @@ -0,0 +1,953 @@ +[![FIWARE Core Context Management](https://nexus.lab.fiware.org/repository/raw/public/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/core/README.md) +[![NGSI LD](https://img.shields.io/badge/NGSI-LD-d6604d.svg)](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf) +[![JSON LD](https://img.shields.io/badge/JSON--LD-1.1-f06f38.svg)](https://w3c.github.io/json-ld-syntax/) + +**Description:** This tutorial introduces linked data concepts to the FIWARE Platform. 
The supermarket chain’s store +finder application is recreated using **NGSI-LD** and the differences between the **NGSI v2** and **NGSI-LD** interfaces +are highlighted and discussed. The tutorial is a direct analogue of the original getting started tutorial but uses API +calls from the **NGSI-LD** interface. + +The tutorial uses [cUrl](https://ec.haxx.se/) commands throughout, but is also available as +[Postman documentation](https://fiware.github.io/tutorials.Linked-Data/) + +[![Run in Postman](https://run.pstmn.io/button.svg)](https://github.com/ScorpioBroker/ScorpioBroker/blob/feature-80-temp/docs/en/source/Payloads/FIWARE%20Linked%20Data.postman_collection.json) + +
# Adding Linked Data concepts to FIWARE Data Entities

> “Six degrees of separation doesn't mean that everyone is linked to everyone else in just six steps. It means that a
> very small number of people are linked to everyone else in a few steps, and the rest of us are linked to the world
> through those special few.”
>
> ― Malcolm Gladwell, The Tipping Point

The introduction to FIWARE [Getting Started tutorial](https://github.com/FIWARE/tutorials.Getting-Started) introduced
the [NGSI v2](https://fiware.github.io/specifications/OpenAPI/ngsiv2) interface that is commonly used to create and
manipulate context data entities. An evolution of that interface has created a supplementary specification called
[NGSI-LD](https://forge.etsi.org/swagger/ui/?url=https://forge.etsi.org/gitlab/NGSI-LD/NGSI-LD/raw/master/spec/updated/full_api.json)
as a mechanism to enhance context data entities through adding the concept of **linked data**. This tutorial will
introduce the background of the ideas behind the new interface and compare and contrast how to create and manipulate
data entities as linked data.

Additional tutorials in the series will further discuss data relationships and how to create context data entities using
linked data, enabling the full knowledge graph to be traversed.

## What is Linked Data?

All users of the Internet will be familiar with the concept of hypertext links, the way that a link on one web page is
able to guide the browser to load another page from a known location.

Whilst humans are able to understand relationship discoverability and how links work, computers find this much more
difficult, and require a well-defined protocol to be able to traverse from one data element to another held in a
separate location.

Creating a system of readable links for computers requires the use of a well defined data format
([JSON-LD](http://json-ld.org/)) and the assignment of unique IDs
([URLs or URNs](https://stackoverflow.com/questions/4913343/what-is-the-difference-between-uri-url-and-urn)) for both
data entities and the relationships between entities so that semantic meaning can be programmatically retrieved from the
data itself.

Properly defined linked data can be used to help answer big data questions, and the data relationships can be traversed
to answer questions like _"Which products are currently available on the shelves of Store X and what prices are they
sold at?"_

### Video: What is Linked Data?

[![](https://fiware.github.io/tutorials.Step-by-Step/img/video-logo.png)](https://www.youtube.com/watch?v=4x_xzT5eF5Q "Introduction")

Click on the image above to watch an introductory video on linked data concepts.

JSON-LD is an extension of JSON; it is a standard way of avoiding ambiguity when expressing linked data in JSON so that
the data is structured in a format which is parsable by machines. It is a method of ensuring that all data attributes
can be easily compared when coming from a multitude of separate data sources, which could have a different idea as to
what each attribute means. For example, when two data entities have a `name` attribute how can the computer be certain
that it refers to a _"Name of a thing"_ in the same sense (rather than a **Username** or a **Surname** or something)?
URLs and data models are used to remove ambiguity by allowing attributes to have both a short form (such as `name`) and
a fully specified long form (such as `http://schema.org/name`), which means it is easy to discover which attributes have
a common meaning within a data structure.

JSON-LD introduces the concept of the `@context` element which provides additional information allowing the computer to
interpret the rest of the data with more clarity and depth.

Furthermore the JSON-LD specification enables you to define a unique `@type`, associating a well-defined
[data model](https://fiware-datamodels.readthedocs.io/en/latest/guidelines/index.html) to the data itself.

### Video: What is JSON-LD?

[![](https://fiware.github.io/tutorials.Step-by-Step/img/video-logo.png)](https://www.youtube.com/watch?v=vioCbTo3C-4 "JSON-LD")

Click on the image above to watch a video describing the basic concepts behind JSON-LD.

## What is NGSI-LD?

**NGSI-LD** is an evolution of the **NGSI v2** information model, which has been modified to improve support for linked
data (entity relationships), property graphs and semantics (exploiting the capabilities offered by JSON-LD). This work
has been conducted under the ETSI ISG CIM initiative and the updated specification has been branded as
[NGSI-LD](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf). The main constructs
of NGSI-LD are: _Entity_, _Property_ and _Relationship_. NGSI-LD Entities (instances) can be the subject of Properties
or Relationships. In terms of the traditional NGSI v2 data model, Properties can be seen as the combination of an
attribute and its value. Relationships allow associations between instances to be established using linked data.

### NGSI v2 Data Model

As a reminder, the NGSI v2 data model is quite simple. It can be summarized as shown below:

![](https://fiware.github.io/tutorials.Linked-Data/img/ngsi-v2.png)

The core element of NGSI v2 is the data _entity_, typically a real object with a changing state (such as a **Store**, a
**Shelf** and so on). Entities have _attributes_ (such as `name` and `location`) and these in turn hold _metadata_ such
as `accuracy` - i.e. the accuracy of a `location` reading.

Every _entity_ must have a `type` which defines the sort of thing the entity describes, but giving an NGSI v2 entity the
`type=Store` is relatively meaningless as no-one is obliged to shape their own **Store** entities in the same fashion.
Similarly adding an attribute called `name` doesn't suddenly make it hold the same data as someone else's `name`
attribute.

Relationships can be defined using NGSI v2, but only so far as giving the attribute an appropriate attribute name
defined by convention (e.g. starting with `ref`, such as `refManagedBy`) and assigning the attribute
`type=Relationship`, which again is purely a naming convention with no real semantic weight.

### NGSI LD Data Model

The NGSI LD data model is more complex, with more rigid definitions of use which lead to a navigable knowledge graph.

![](https://fiware.github.io/tutorials.Linked-Data/img/ngsi-ld.png)

Once again, an _entity_ can be considered to be the core element. Every entity must use a unique `id` which must be a URI,
often a [URN](https://en.wikipedia.org/wiki/Uniform_resource_name); there is also a `type`, used to define the structure
of the data held, which must also be a URI. This URI should correspond to a well-defined data model which can be found
on the web.
For example the URI `https://uri.fiware.org/ns/data-models#Building` is used to define a common data model for
a [Building](https://fiware-datamodels.readthedocs.io/en/latest/Building/Building/doc/spec/index.html).

_Entities_ can have _properties_ and _relationships_. Ideally the name of each _property_ should also be a well defined
URI which corresponds to a common concept found across the web (e.g. `http://schema.org/address` is a common URI for the
physical address of an item). The _property_ will also have a value which will reflect the state of that property (e.g.
`name="Checkpoint Markt"`). Finally a property may itself have further properties (a.k.a. _properties-of-properties_)
which reflect further information about the property itself. Properties and relationships may in turn have a linked
embedded structure (of _properties-of-properties_, _properties-of-relationships_, _relationships-of-properties_ or
_relationships-of-relationships_ etc.), which leads to the following:

An NGSI LD Data Entity (e.g. a supermarket):

- Has an `id` which must be unique. For example `urn:ngsi-ld:Building:store001`.
- Has a `type` which should be a fully qualified URI of a well defined data model. For example
  `https://uri.fiware.org/ns/data-models#Building`. Authors can also use type names, as short hand strings for types,
  mapped to fully qualified URIs through the JSON-LD `@context`.
- Has a _property_ of the entity, for example, an `address` attribute which holds the address of the store. This can be
  expanded into `http://schema.org/address`, which is known as a fully qualified name
  ([FQN](https://en.wikipedia.org/wiki/Fully_qualified_name)).
- The `address`, like any _property_, will have a _value_ corresponding to the _property_ `address` (e.g. _Bornholmer
  Straße 65, 10439 Prenzlauer Berg, Berlin_).
- Has a _property-of-a-property_ of the entity, for example a `verified` field for the `address`.
- Has a _relationship_ of the entity, for example, a `managedBy` field where the relationship `managedBy` corresponds
  to another data entity: `urn:ngsi-ld:Person:bob-the-manager`.
- The relationship `managedBy` may itself have a _property-of-a-relationship_ (e.g. `since`); this holds the date Bob
  started working at the store.
- The relationship `managedBy` may itself have a _relationship-of-a-relationship_ (e.g. `subordinateTo`); this holds
  the URN of the area manager above Bob in the hierarchy.

As you can see, the knowledge graph is well defined and can be expanded indefinitely.

Relationships will be dealt with in more detail in a subsequent tutorial.

# Architecture

Our demo application will only make use of one FIWARE component - the
[Scorpio Broker](https://scorpio.readthedocs.io/en/latest/). Usage of the Scorpio Context Broker (with proper
context data flowing through it) is sufficient for an application to qualify as _“Powered by FIWARE”_.

![Deployment Architecture](figures/deploymentarchitecture.png)

The deployment architecture leverages the Spring Cloud framework, which addresses many microservice concerns (e.g.
scaling, monitoring, fault tolerance, high availability, security, decoupling, etc.), and a Kafka-based distributed and
scalable message queue infrastructure to provide high performance in message processing for the huge number of context
requests which is usual in the IoT domain.
The deployment architecture covers the high-level request flow (HTTP-based REST with the POST/GET/DELETE/PATCH methods) from the external world to the Scorpio Broker system. The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide.
In reality, each of the Scorpio Broker services will be implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment.

The necessary configuration information can be seen in the services section of the associated `docker-compose.yml` file.

## Prerequisites

### Docker

Docker is a tool designed to make it easier to create, deploy, and run applications by using containers.
Containers allow a developer to package up an application with all of the parts it needs, such as libraries and other dependencies, and deploy it as one package.

- To get Docker on Windows, click [here](https://docs.docker.com/docker-for-windows/)
- To get Docker on Mac, click [here](https://docs.docker.com/docker-for-mac/)
- To get Docker on Linux, click [here](https://docs.docker.com/install/)

### Getting a docker container

The current maven build supports two types of docker container
generation from the build, using maven profiles to trigger them.

The first profile is called 'docker' and can be called like this:
```bash
mvn clean package -DskipTests -Pdocker
```
This will generate individual docker containers for each microservice.
The corresponding docker-compose file is `docker-compose-dist.yml`.

The second profile is called 'docker-aaio' (for almost all in one). This
will generate one single docker container for all components of the
broker except the Kafka message bus and the Postgres database.

To get the aaio version run the maven build like this:
```bash
mvn clean package -DskipTests -Pdocker-aaio
```
The corresponding docker-compose file is `docker-compose-aaio.yml`.

### General remark for the Kafka docker image and docker-compose

The Kafka docker container requires you to provide the environment
variable `KAFKA_ADVERTISED_HOST_NAME`. This has to be changed in the
docker-compose files to match your docker host IP. You can use 127.0.0.1;
however, this will prevent you from running Kafka in cluster mode.

For further details please refer to the documentation of the Kafka docker image.

### Running docker build outside of Maven

If you want to have the build of the jars separated from the docker
build, you need to provide certain VARS to docker.
The following list
shows all the vars and their intended value if you run docker build from
the root dir:

```bash
- BUILD_DIR_ACS = Core/AtContextServer
- BUILD_DIR_SCS = SpringCloudModules/config-server
- BUILD_DIR_SES = SpringCloudModules/eureka
- BUILD_DIR_SGW = SpringCloudModules/gateway
- BUILD_DIR_HMG = History/HistoryManager
- BUILD_DIR_QMG = Core/QueryManager
- BUILD_DIR_RMG = Registry/RegistryManager
- BUILD_DIR_EMG = Core/EntityManager
- BUILD_DIR_STRMG = Storage/StorageManager
- BUILD_DIR_SUBMG = Core/SubscriptionManager
- JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar
- JAR_FILE_BUILD_SCS = config-server-${project.version}.jar
- JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar
- JAR_FILE_BUILD_SGW = gateway-${project.version}.jar
- JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar
- JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar
- JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar
- JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar
- JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar
- JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar
- JAR_FILE_RUN_ACS = AtContextServer.jar
- JAR_FILE_RUN_SCS = config-server.jar
- JAR_FILE_RUN_SES = eureka-server.jar
- JAR_FILE_RUN_SGW = gateway.jar
- JAR_FILE_RUN_HMG = HistoryManager.jar
- JAR_FILE_RUN_QMG = QueryManager.jar
- JAR_FILE_RUN_RMG = RegistryManager.jar
- JAR_FILE_RUN_EMG = EntityManager.jar
- JAR_FILE_RUN_STRMG = StorageManager.jar
- JAR_FILE_RUN_SUBMG = SubscriptionManager.jar
```

# Creating a "Powered by FIWARE" app based on Linked Data

This tutorial recreates the same data entities as the initial _"Powered by FIWARE"_ supermarket finder app, but using
NGSI-LD linked data entities rather than NGSI v2.

## Creating Context Data

When creating linked data entities, it is important to use common data models. This will allow us to easily combine data
from multiple sources and remove ambiguity when comparing data coming from different sources.

Creating linked data using fully qualified names throughout would be painful, as each attribute would need to be a URI,
so JSON-LD introduces the idea of an `@context` attribute which can hold pointers to context definitions. To add a
FIWARE [Building](https://fiware-datamodels.readthedocs.io/en/latest/Building/Building/doc/spec/index.html) data entity,
the following `@context` would be required:

```json
{
    "id": "urn:ngsi-ld:Building:store001",
    "type": "Building",
    ... other data attributes
    "@context": [
        "https://fiware.github.io/data-models/context.jsonld",
        "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
    ]
}
```

### Core Context

[https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld](https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld)
refers to the Core `@context` of NGSI-LD; this defines elements such as `id` and `type` which are common to all NGSI
entities, as well as defining terms such as `Property` and `Relationship`. The core context is so fundamental to
NGSI-LD that it is added by default to any `@context` sent in a request.

### FIWARE Data Models

[https://fiware.github.io/data-models/context.jsonld](https://fiware.github.io/data-models/context.jsonld) refers to the
definition of standard data models supplied by FIWARE.
Adding this to the `@context` will load the definitions of all +the [data models](https://fiwaredata-models.readthedocs.io) defined by the FIWARE Foundation in collaboration with other +organizations such as [GSMA](https://www.gsma.com/) and [TM Forum](https://www.tmforum.org/). A summary of the FQNs +related to **Building** can be seen below: + +```json +{ + "@context": { + "Building": "https://uri.fiware.org/ns/data-models#Building", + ... etc + "address": "http://schema.org/address", + "category": "https://uri.fiware.org/ns/data-models#category", + "location": "http://uri.etsi.org/ngsi-ld/location", + ...etc + } +} +``` + +If we include this context definition, it means that we will be able to use short names for `Building`, `address`, +`location` for our entities, but computers will also be able to read the FQNs when comparing with other sources. + +To create a valid **Building** data entity in the context broker, make a POST request to the +`http://localhost:9090/ngsi-ld/v1/entities` endpoint as shown below. It is essential that the appropriate +`Content-Type: application/ld+json` is also used, so that the data entity is recognized as Linked data. + +#### 2 Request: + +```bash +curl -iX POST \ + http://localhost:9090/ngsi-ld/v1/entities \ + -H 'Content-Type: application/ld+json' \ + -d '{ + "id": "urn:ngsi-ld:Building:store001", + "type": "Building", + "category": { + "type": "Property", + "value": ["commercial"] + }, + "address": { + "type": "Property", + "value": { + "streetAddress": "Bornholmer Straße 65", + "addressRegion": "Berlin", + "addressLocality": "Prenzlauer Berg", + "postalCode": "10439" + }, + "verified": { + "type": "Property", + "value": true + } + }, + "location": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [13.3986, 52.5547] + } + }, + "name": { + "type": "Property", + "value": "Bösebrücke Einkauf" + }, + "@context": [ + "https://fiware.github.io/data-models/context.jsonld", + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" + ] +}' +``` + +The first request will take some time, as the context broker must navigate and load all of the files mentioned in the +`@context`. + +> **Note**: if `https://fiware.github.io/data-models/context.jsonld` is unavailable for some reason the request will +> fail +> +> For a working production system it is essential that the `@context` files are always available to ensure third parties +> can read the context. High availability infrastructure has not been considered for this tutorial to keep the +> architecture simple. 
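Since the request above will fail whenever the `@context` files cannot be retrieved, it can be worth confirming that they are reachable from your environment before creating entities. The following check is purely illustrative:

```bash
# Illustrative reachability check for the @context files used in this tutorial
curl -sI 'https://fiware.github.io/data-models/context.jsonld' | head -n 1
curl -sI 'https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld' | head -n 1
```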
#### 3 Request:

Each subsequent entity must have a unique `id` for the given `type`.

```bash
curl -iX POST \
  http://localhost:9090/ngsi-ld/v1/entities/ \
  -H 'Content-Type: application/ld+json' \
  -d '{
    "id": "urn:ngsi-ld:Building:store002",
    "type": "Building",
    "category": {
        "type": "Property",
        "value": ["commercial"]
    },
    "address": {
        "type": "Property",
        "value": {
            "streetAddress": "Friedrichstraße 44",
            "addressRegion": "Berlin",
            "addressLocality": "Kreuzberg",
            "postalCode": "10969"
        },
        "verified": {
            "type": "Property",
            "value": true
        }
    },
    "location": {
        "type": "GeoProperty",
        "value": {
            "type": "Point",
            "coordinates": [13.3903, 52.5075]
        }
    },
    "name": {
        "type": "Property",
        "value": "Checkpoint Markt"
    },
    "@context": [
        "https://fiware.github.io/data-models/context.jsonld",
        "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
    ]
}'
```

### Defining Properties within the NGSI-LD entity definition

The attributes `id` and `type` should be familiar to anyone who has used NGSI v2, and these have not changed. As
mentioned above, the type should refer to an included data model; in this case `Building` is being used as a short name
for the included URN `https://uri.fiware.org/ns/data-models#Building`. Thereafter each _property_ is defined as a JSON
element containing two attributes, a `type` and a `value`.

The `type` of a _property_ attribute must be one of the following:

- `"GeoProperty"`: `"http://uri.etsi.org/ngsi-ld/GeoProperty"` for locations. Locations should be specified as
  Longitude-Latitude pairs in [GeoJSON format](https://tools.ietf.org/html/rfc7946). The preferred name for the
  primary location attribute is `location`.
- `"Property"`: `"http://uri.etsi.org/ngsi-ld/Property"` - for everything else.
- `"Property"` should also be used for all time-based values, but the property `value` should be Date, Time or
  DateTime strings encoded in the [ISO 8601 format](https://en.wikipedia.org/wiki/ISO_8601) - e.g.
  `YYYY-MM-DDThh:mm:ssZ`.

> **Note:** For simplicity, this data entity has no relationships defined. Relationships must be given the
> `type="Relationship"`. Relationships will be discussed in a subsequent tutorial.

### Defining Properties-of-Properties within the NGSI-LD entity definition

_Properties-of-Properties_ is the NGSI-LD equivalent of metadata (i.e. _"data about data"_); it is used to describe
properties of the attribute value itself like accuracy, provider, or the units to be used. Some built-in metadata
attributes already exist and these names are reserved:

- `createdAt` (type: DateTime): attribute creation date as an ISO 8601 string.
- `modifiedAt` (type: DateTime): attribute modification date as an ISO 8601 string.

Additionally `observedAt`, `datasetId` and `instanceId` may optionally be added in some cases, and `location`,
`observationSpace` and `operationSpace` have special meaning for GeoProperties.

In the examples given above, one element of metadata (i.e. a _property-of-a-property_) can be found within the `address`
attribute: a `verified` flag indicates whether the address has been confirmed. The most common _property-of-a-property_
is `unitCode`, which should be used to hold the UN/CEFACT
[Common Codes](http://wiki.goodrelations-vocabulary.org/Documentation/UN/CEFACT_Common_Codes) for Units of Measurement.
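As a sketch of how such metadata looks in practice, the request below appends a hypothetical `temperature` Property (not part of the Building entities created above) to Store 1, carrying a `unitCode` and an `observedAt` _property-of-a-property_. It assumes the standard NGSI-LD append endpoint `/ngsi-ld/v1/entities/<entity-id>/attrs`; adapt it to your own data model before use.

```bash
# Illustrative only: append a hypothetical "temperature" Property with metadata to store001
curl -iX POST \
  'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs' \
  -H 'Content-Type: application/ld+json' \
  -d '{
    "temperature": {
        "type": "Property",
        "value": 21.7,
        "unitCode": "CEL",
        "observedAt": "2020-03-06T12:00:00Z"
    },
    "@context": [
        "https://fiware.github.io/data-models/context.jsonld",
        "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
    ]
}'
```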
+ +## Querying Context Data + +A consuming application can now request context data by making NGSI-LD HTTP requests to the Scorpio Broker. The +existing NGSI-LD interface enables us to make complex queries and filter results and retrieve data with FQNs or with +short names. + +### Obtain entity data by FQN Type + +This example returns the data of all `Building` entities within the context data The `type` parameter is mandatory for +NGSI-LD and is used to filter the response. The Accept HTTP header is needed to retrieve JSON-LD content. + +#### 4 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -H 'Accept: application/ld+json' \ + -d 'type=https://uri.fiware.org/ns/data-models%23Building' +``` + +#### Response: + +The response returns the Core `@context` by default (`https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld`) and +all attributes are expanded whenever possible. + +- `id`, `type`, `location` and `name` are defined in the core context and are not expanded. +- `address` has been mapped to `http://schema.org/address` +- `category` has been mapped to `https://uri.fiware.org/ns/data-models#category` + +Note that if an attribute has not been not associated to an FQN when the entity was created, the short name will +**always** be displayed. + +```json +[ + { + "id": "urn:ngsi-ld:Building:store001", + "type": "https://uri.fiware.org/ns/data-models#Building", + "http://schema.org/address": { + "type": "Property", + "value": { + "streetAddress": "Bornholmer Straße 65", + "addressRegion": "Berlin", + "addressLocality": "Prenzlauer Berg", + "postalCode": "10439" + }, + "verified": { + "type": "Property", + "value": true + } + }, + "name": { + "type": "Property", + "value": "Bösebrücke Einkauf" + }, + "https://uri.fiware.org/ns/data-models#category": { + "type": "Property", + "value": ["commercial"] + }, + "location": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [13.3986, 52.5547] + } + }, + "@context": "https://uri.etsi.org/ngsi-lv1/ngsi-ld-core-context.jsonld" + }, + { + "id": "urn:ngsi-ld:Building:store002", + "type": "https://uri.fiware.org/ns/data-models#Building", + "http://schema.org/address": { + "type": "Property", + "value": { + "streetAddress": "Friedrichstraße 44", + "addressRegion": "Berlin", + "addressLocality": "Kreuzberg", + "postalCode": "10969" + }, + "verified": { + "type": "Property", + "value": true + } + }, + "name": { + "type": "Property", + "value": "Checkpoint Markt" + }, + "https://uri.fiware.org/ns/data-models#category": { + "type": "Property", + "value": ["commercial"] + }, + "location": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [13.3903, 52.5075] + } + }, + "@context": "https://uri.etsi.org/ngsi-lv1/ngsi-ld-core-context.jsonld" + } +] +``` + +### Obtain entity data by ID + +This example returns the data of `urn:ngsi-ld:Building:store001` + +#### 5 Request: + +```bash +curl -G -X GET \ + -H 'Accept: application/ld+json' \ + 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001' +``` + +#### Response: + +The response returns the Core `@context` by default (`https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld`) and +all attributes are expanded whenever possible. 
+ +```json +{ + "id": "urn:ngsi-ld:Building:store001", + "type": "https://uri.fiware.org/ns/data-models#Building", + "http://schema.org/address": { + "type": "Property", + "value": { + "streetAddress": "Bornholmer Straße 65", + "addressRegion": "Berlin", + "addressLocality": "Prenzlauer Berg", + "postalCode": "10439" + }, + "verified": { + "type": "Property", + "value": true + } + }, + "name": { + "type": "Property", + "value": "Bösebrücke Einkauf" + }, + "https://uri.fiware.org/ns/data-models#category": { + "type": "Property", + "value": ["commercial"] + }, + "location": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [13.3986, 52.5547] + } + }, + "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" +} +``` + +### Obtain entity data by type + +If a reference to the supplied data is supplied, it is possible to return short name data and limit responses to a +specific `type` of data. For example, the request below returns the data of all `Building` entities within the context +data. Use of the `type` parameter limits the response to `Building` entities only, use of the `options=keyValues` query +parameter reduces the response down to standard JSON-LD. + +A [`Link` header](https://www.w3.org/wiki/LinkHeader) must be supplied to associate the short form `type="Building"` +with the FQN `https://uri.fiware.org/ns/data-models#Building`. The full link header syntax can be seen below: + +```text +Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json +``` + +The standard HTTP `Link` header allows metadata (in this case the `@context`) to be passed in without actually touching +the resource in question. In the case of NGSI-LD, the metadata is a file in `application/ld+json` format. + +#### 6 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -H 'Accept: application/ld+json' \ + -d 'type=Building' \ + -d 'options=keyValues' +``` + +#### Response: + +Because of the use of the `options=keyValues`, the response consists of JSON only without the attribute definitions +`type="Property"` or any _properties-of-properties_ elements. You can see that `Link` header from the request has been +used as the `@context` returned in the response. + +```json +[ + { + "id": "urn:ngsi-ld:Building:store001", + "type": "Building", + "address": { + "streetAddress": "Bornholmer Straße 65", + "addressRegion": "Berlin", + "addressLocality": "Prenzlauer Berg", + "postalCode": "10439" + }, + "name": "Bösebrücke Einkauf", + "category": ["commercial"], + "location": { + "type": "Point", + "coordinates": [13.3986, 52.5547] + }, + "@context": "https://fiware.github.io/data-models/context.jsonld" + }, + { + "id": "urn:ngsi-ld:Building:store002", + "type": "Building", + "address": { + "streetAddress": "Friedrichstraße 44", + "addressRegion": "Berlin", + "addressLocality": "Kreuzberg", + "postalCode": "10969" + }, + "name": "Checkpoint Markt", + "category": ["commercial"], + "location": { + "type": "Point", + "coordinates": [13.3903, 52.5075] + }, + "@context": "https://fiware.github.io/data-models/context.jsonld" + } +] +``` + +### Filter context data by comparing the values of an attribute + +This example returns all `Building` entities with the `name` attribute _Checkpoint Markt_. 
Filtering can be done using the `q` parameter - if a string has spaces in it, it can be URL encoded and held within
double quote characters (`"` = `%22`).

#### 7 Request:

```bash
curl -G -X GET \
    'http://localhost:9090/ngsi-ld/v1/entities' \
    -H 'Link: <https://fiware.github.io/data-models/context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \
    -H 'Accept: application/ld+json' \
    -d 'type=Building' \
    -d 'q=name==%22Checkpoint%20Markt%22' \
    -d 'options=keyValues'
```

#### Response:

The `@context` file referenced in the `Link` header (`https://fiware.github.io/data-models/context.jsonld`) holds an
array of `@context` as shown:

```json
{
    "@context": [
        "https://fiware.github.io/data-models/context.jsonld",
        "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld"
    ]
}
```

and therefore includes the FIWARE Building model.

This means that use of the `Link` header and the `options=keyValues` parameter reduces the response to short form
JSON-LD as shown:

```json
[
    {
        "id": "urn:ngsi-ld:Building:store002",
        "type": "Building",
        "address": {
            "streetAddress": "Friedrichstraße 44",
            "addressRegion": "Berlin",
            "addressLocality": "Kreuzberg",
            "postalCode": "10969"
        },
        "name": "Checkpoint Markt",
        "category": ["commercial"],
        "location": {
            "type": "Point",
            "coordinates": [13.3903, 52.5075]
        },
        "@context": "https://fiware.github.io/data-models/context.jsonld"
    }
]
```

### Filter context data by comparing the values of an attribute in an Array

Within the standard `Building` model, the `category` attribute refers to an array of strings. This example returns all
`Building` entities with a `category` attribute which contains either the `commercial` or `office` string. Filtering can
be done using the `q` parameter, comma separating the acceptable values.

#### 8 Request:

```bash
curl -G -X GET \
    'http://localhost:9090/ngsi-ld/v1/entities' \
    -H 'Link: <https://fiware.github.io/data-models/context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \
    -H 'Accept: application/ld+json' \
    -d 'type=Building' \
    -d 'q=category==%22commercial%22,%22office%22' \
    -d 'options=keyValues'
```

#### Response:

The response is returned in JSON-LD format with short form attribute names:

```json
[
    {
        "id": "urn:ngsi-ld:Building:store001",
        "type": "Building",
        "address": {
            "streetAddress": "Bornholmer Straße 65",
            "addressRegion": "Berlin",
            "addressLocality": "Prenzlauer Berg",
            "postalCode": "10439"
        },
        "name": "Bösebrücke Einkauf",
        "category": ["commercial"],
        "location": {
            "type": "Point",
            "coordinates": [13.3986, 52.5547]
        },
        "@context": "https://fiware.github.io/data-models/context.jsonld"
    },
    {
        "id": "urn:ngsi-ld:Building:store002",
        "type": "Building",
        "address": {
            "streetAddress": "Friedrichstraße 44",
            "addressRegion": "Berlin",
            "addressLocality": "Kreuzberg",
            "postalCode": "10969"
        },
        "name": "Checkpoint Markt",
        "category": ["commercial"],
        "location": {
            "type": "Point",
            "coordinates": [13.3903, 52.5075]
        },
        "@context": "https://fiware.github.io/data-models/context.jsonld"
    }
]
```

### Filter context data by comparing the values of a sub-attribute

This example returns all stores found in the Kreuzberg District.

Filtering can be done using the `q` parameter - sub-attributes are annotated using the bracket syntax, e.g.
`q=address[addressLocality]=="Kreuzberg"`. This differs from NGSI v2, where dot syntax was used.

#### 9 Request:

```bash
curl -G -X GET \
    'http://localhost:9090/ngsi-ld/v1/entities' \
    -H 'Link: <https://fiware.github.io/data-models/context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \
    -H 'Accept: application/ld+json' \
    -d 'type=Building' \
    -d 'q=address[addressLocality]==%22Kreuzberg%22' \
    -d 'options=keyValues'
```

#### Response:

Use of the `Link` header and the `options=keyValues` parameter reduces the response to short form JSON-LD.

```json
[
    {
        "id": "urn:ngsi-ld:Building:store002",
        "type": "Building",
        "address": {
            "streetAddress": "Friedrichstraße 44",
            "addressRegion": "Berlin",
            "addressLocality": "Kreuzberg",
            "postalCode": "10969"
        },
        "name": "Checkpoint Markt",
        "category": ["commercial"],
        "location": {
            "type": "Point",
            "coordinates": [13.3903, 52.5075]
        },
        "@context": "https://fiware.github.io/data-models/context.jsonld"
    }
]
```

### Filter context data by querying metadata

This example returns the data of all `Building` entities with a verified address. The `verified` attribute is an
example of a _Property-of-a-Property_.

Metadata queries (i.e. queries on Properties of Properties) are annotated using the dot syntax, e.g.
`q=address.verified==true`. This supersedes the `mq` parameter from NGSI v2.

#### 10 Request:

```bash
curl -G -X GET \
    'http://localhost:9090/ngsi-ld/v1/entities' \
    -H 'Link: <https://fiware.github.io/data-models/context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \
    -H 'Accept: application/json' \
    -d 'type=Building' \
    -d 'q=address.verified==true' \
    -d 'options=keyValues'
```

#### Response:

Because of the use of `options=keyValues`, together with the Accept HTTP header (`application/json`), the response
consists of **JSON only** without an `@context` or attribute `type` and `metadata` elements.

```json
[
    {
        "id": "urn:ngsi-ld:Building:store001",
        "type": "Building",
        "address": {
            "streetAddress": "Bornholmer Straße 65",
            "addressRegion": "Berlin",
            "addressLocality": "Prenzlauer Berg",
            "postalCode": "10439"
        },
        "location": {
            "type": "Point",
            "coordinates": [13.3986, 52.5547]
        },
        "name": "Bösebrücke Einkauf"
    },
    {
        "id": "urn:ngsi-ld:Building:store002",
        "type": "Building",
        "address": {
            "streetAddress": "Friedrichstraße 44",
            "addressRegion": "Berlin",
            "addressLocality": "Kreuzberg",
            "postalCode": "10969"
        },
        "location": {
            "type": "Point",
            "coordinates": [13.3903, 52.5075]
        },
        "name": "Checkpoint Markt"
    }
]
```

### Filter context data by comparing the values of a geo:json attribute

This example returns all stores within 2km of the **Brandenburg Gate** in **Berlin** (_52.5162N, 13.3777E_). To make a
geo-query request, three parameters must be specified: `geometry`, `coordinates` and `georel`.

The syntax for NGSI-LD has been updated: the `coordinates` parameter is now represented in
[GeoJSON](https://tools.ietf.org/html/rfc7946), including the square brackets, rather than the simple lat-long pairs
required in NGSI v2.

Note that by default the geo-query will be applied to the `location` attribute, as this is the default GeoProperty
specified in NGSI-LD. If another attribute is to be used, an additional `geoproperty` parameter is required, as shown
in the sketch below.
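For illustration only, such a non-default geo-query might look like the following sketch. The `entranceLocation`
GeoProperty is hypothetical and is not part of the `Building` entities used in this tutorial; the tutorial's own
geo-query (Request 11 below) uses the default `location` attribute.

```bash
# Sketch only: geo-query against a hypothetical GeoProperty named "entranceLocation"
# rather than the default "location" attribute.
curl -G -X GET \
    'http://localhost:9090/ngsi-ld/v1/entities' \
    -H 'Link: <https://fiware.github.io/data-models/context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \
    -H 'Accept: application/json' \
    -d 'type=Building' \
    -d 'geoproperty=entranceLocation' \
    -d 'geometry=Point' \
    -d 'coordinates=[13.3777,52.5162]' \
    -d 'georel=near;maxDistance==2000' \
    -d 'options=keyValues'
```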
+ +#### 11 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/json"' + -H 'Accept: application/json' \ + -d 'type=Building' \ + -d 'geometry=Point' \ + -d 'coordinates=[13.3777,52.5162]' \ + -d 'georel=near;maxDistance==2000' \ + -d 'options=keyValues' +``` + +#### Response: + +Because of the use of the `options=keyValues` together with the Accept HTTP header (`application/json`), the response +consists of **JSON only** without an `@context` or attribute `type` and `metadata` elements. + +```json +[ + { + "id": "urn:ngsi-ld:Building:store002", + "type": "Building", + "address": { + "streetAddress": "Friedrichstraße 44", + "addressRegion": "Berlin", + "addressLocality": "Kreuzberg", + "postalCode": "10969" + }, + "location": { + "type": "Point", + "coordinates": [13.3903, 52.5075] + }, + "name": "Checkpoint Markt" + } +] +``` diff --git a/scorpio-broker/docs/en/source/make.bat b/scorpio-broker/docs/en/source/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..33043e10ffa85df73a778e5d30e268b073319980 --- /dev/null +++ b/scorpio-broker/docs/en/source/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build +set SPHINXPROJ=ScorpioBroker + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd diff --git a/scorpio-broker/docs/en/source/mqtt.rst b/scorpio-broker/docs/en/source/mqtt.rst new file mode 100644 index 0000000000000000000000000000000000000000..ea847157d39e111273e672b6007eedda4fe1b2ca --- /dev/null +++ b/scorpio-broker/docs/en/source/mqtt.rst @@ -0,0 +1,144 @@ +****************** +MQTT Notifications +****************** + +MQTT is a pub/sub based message bus and works with topics. For more detailed information please visit https://mqtt.org/. +NGSI-LD allows you to receive notifications via MQTT. +A subscription received via HTTP specifies an MQTT endpoint in the "notification.endpoint.uri" member of the subscription and the MQTT notification binding is supported by the NGSI-LD implementation, notifications related to this subscription shall be sent via the MQTT protocol. + +The syntax of an MQTT endpoint URI is **mqtt[s]://[][:]@[:]/[/]*** and follows an existing convention for representing an MQTT endpoint as a URI. + +Username and password can be optionally specified as part of the endpoint URI. If the port is not explicitly specified, the default MQTT port is **1883** for MQTT over TCP and **8883** for MQTTS, +For the MQTT protocol, there are currently two versions supported, **MQTTv3.1.1** and **MQTTv5.0**. + +.. figure:: figures/MQTT.jpg + +The flow of Scorpio broker notification via MQTT:- + +1. Subscribe to TOPIC. +2. Create NGSI-LD Subscription, with MQTT Server's URI as a contact point to send Notifications. +3. 
Publish Notifications to TOPIC extracted from URI. +4. Send Notifications from the MQTT server to the MQTT subscriber. + +To start the MQTT broker follow the below step:- + +1. Install the MQTT broker (Mosquitto). +2. Add chrome extension MQTTlens. +3. Create the MQTT broker connection. +4. Subscribe the topic. + +Operations +############ + +1. Entity Creation +********************* + +To create the entity, hit the endpoint **http://:/ngsi-ld/v1/entities/** with the given payload. + +.. code-block:: JSON + + { + "id":"urn:ngsi-ld:Vehicle:A135", + "type":"Vehicle", + "brandName":{ + "type":"Property", + "value":"Mercedes" + }, + "speed":[{ + "type":"Property", + "value": 55, + "datasetId": "urn:ngsi-ld:Property:speedometerA4567-speed", + "source":{ + "type":"Property", + "value": "Speedometer" + } + }, + { + "type":"Property", + "value": 11, + "datasetId": "urn:ngsi-ld:Property:gpsA4567-speed", + "source":{ + "type":"Property", + "value": "GPS" + } + }, + { + "type":"Property", + "value": 10, + "source":{ + "type":"Property", + "value": "CAMERA" + } + }] + } + +2. Subscription +********************* + +To subscribe to the entity, hit the endpoint **http://:/ ngsi-ld/v1/subscriptions/** with the given payload. + +.. code-block:: JSON + + { + "id": "urn:ngsi-ld:Subscription:16", + "type": "Subscription", + "entities": [{ + "id": "urn:ngsi-ld:Vehicle:A135", + "type": "Vehicle" + }], + "watchedAttributes": ["brandName"], + "q":"brandName!=Mercedes", + "notification": { + "attributes": ["brandName"], + "format": "keyValues", + "endpoint": { + "uri": "mqtt://localhost:1883/notify", + "accept": "application/json", + "notifierinfo": { + "version" : "mqtt5.0", + "qos" : 0 + } + } + } + } + +3. Notification +********************* + +If we update the value of the attribute and making the PATCH request at **http://:/ngsi-ld/v1/entities/entityId/attrs** + +.. code-block:: JSON + + { + "brandName":{ + "type":"Property", + "value":"BMW" + } + } + +then, we get the notification. + +.. code-block:: JSON + + { + "metadata": { + "link": "https://json-ld.org/contexts/person.jsonld", + "contentType": "application/json" + }, + "body": { + "id": "ngsildbroker:notification:-7550927064189664633", + "type": "Notification", + "data": [{ + "id": "urn:ngsi-ld:Vehicle:A135", + "type": "Vehicle", + "brandName": { + "type": "Property", + "createdAt": "2020-07-29T07:19:33.872000Z", + "value": "BMW", + "modifiedAt": "2020-07-29T07:51:21.183000Z" + } + }], + "notifiedAt": "2020-07-29T07:51:22.300000Z", + "subscriptionId": "urn:ngsi-ld:Subscription:16" + } + } diff --git a/scorpio-broker/docs/en/source/multivalue.rst b/scorpio-broker/docs/en/source/multivalue.rst new file mode 100644 index 0000000000000000000000000000000000000000..7a079db848da096621d1588f65cfb235edd51469 --- /dev/null +++ b/scorpio-broker/docs/en/source/multivalue.rst @@ -0,0 +1,111 @@ +************************* +Multi-value Attribute +************************* + +Multi-value Attribute is a feature through which an Entity can simultaneously have Attributes with more than one instance. +In the case of Properties, there may be more than one source at a time that provides a Property value, e.g. based on independent sensor measurements with different quality characteristics. + +For example: take a speedometer and a GPS both providing the current speed of a car or take a thermometer or an infrared camera both provides the temperature of the body. + +.. 
figure:: figures/multivalueDiagram.png + +In the case of Relationships, there may be non-functional Relationships, e.g. for a room, there may be multiple "contains" Relationships to all sorts of objects currently in the room that have been put there by different people and which are dynamically changing over time. +To be able to explicitly manage such multi-attributes, the optional datasetId property is used, which is of datatype URI. + + +CRUD Operations +-------------------- + + +If a datasetId is provided when creating, updating, appending or deleting Attributes, only instances with the same datasetId are affected, leaving instances with another datasetId or an instance without a datasetId untouched. +If no datasetId is provided, it is considered as the default Attribute instance. It is not required to explicitly provide this default datasetId, but even if not present it is treated as if this default datasetId was present in the request(s). +Thus the creation, updating, appending or deleting of Attributes without providing a datasetId only affects the default property instance. + +Note:-There can only be one default Attribute instance for an Attribute with a given Attribute Name in any request or response. + +When requesting Entity information, if there are multiple instances of matching Attributes these are returned as arrays of Attributes respectively, instead of a single Attribute element. The datasetId of the default Attribute instance is never explicitly included in responses. +In case of conflicting information for an Attribute, where a datasetId is duplicated, but there are differences in the other attribute data, the one with the most recent observedAt DateTime, if present, and otherwise the one with the most recent modifiedAt DateTime shall be provided. + +1. Create Operation +========================= +In order to create the entity with the multi-value attribute, we can hit the endpoint **http://:/ngsi-ld/v1/entities/** with the given payload. + +.. code-block:: JSON + + { + "id":"urn:ngsi-ld:Vehicle:A135", + "type":"Vehicle", + "brandName":{ + "type":"Property", + "value":"Mercedes" + }, + "speed":[{ + "type":"Property", + "value": 55, + "datasetId": "urn:ngsi-ld:Property:speedometerA4567-speed", + "source":{ + "type":"Property", + "value": "Speedometer" + } + }, + { + "type":"Property", + "value": 11, + "datasetId": "urn:ngsi-ld:Property:gpsA4567-speed", + "source":{ + "type":"Property", + "value": "GPS" + } + }, + { + "type":"Property", + "value": 10, + "source":{ + "type":"Property", + "value": "CAMERA" + } + }] + } + +2. Update Operation +====================== +- **Update the attribute instance value based on datasetId** + + We can update the value of the particular instance by sending the datasetId in the body and making the PATCH request at **http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId** + +.. code-block:: JSON + + { + "value":"27", + "datasetId":"urn:ngsi-ld:Property:speedometerA4567-speed" + } + + +- **Update the default attribute instance value based on attribute name** + + We can update the value of the default instance by making the PATCH request at **http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId** with only updated value in the payload. + +.. code-block:: JSON + + { + "value":"27" + } + +3. 
Delete Operation +===================== + +- **Delete the default attribute instance** + + In order to delete the default attribute instance, make the DELETE request with URL **http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId** this will delete the default instance of the attribute. + +- **Delete the attribute instance with datasetId** + + To delete the particular attribute instance, make a DELETE request with URL **http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId?datasetId={{datasetId}}** where datasetId is the id of the instance which we require to be deleted. + +- **Delete all the attribute instance with the given attribute name** + + If we want to delete all the attribute instance with the given attribute name, then we need to make DELETE request with the URL **http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId?deleteAll=true** . + +4. Query Operation +===================== + In order to retrieve the entity details, make a GET request with URL **http://:/ngsi-ld/v1/entities/**, and we will get all the instance of the required attribute. \ No newline at end of file diff --git a/scorpio-broker/docs/en/source/onepageTutorial.rst b/scorpio-broker/docs/en/source/onepageTutorial.rst new file mode 100644 index 0000000000000000000000000000000000000000..8363101ca7ef0ffff2e70b0a37eb2f85e648983a --- /dev/null +++ b/scorpio-broker/docs/en/source/onepageTutorial.rst @@ -0,0 +1,251 @@ +*********** +Basic Guide +*********** + +Architectural Overview +###################### +Scorpio Broker is a reference implementation of NGSI-LD APIs. Scorpio Broker provides an implementation of REST API endpoints for various data context operations that conform to NGSI-LD API specification. Scorpio Broker component has been implemented based on modular, Microservices oriented, scalable, secure by design, easy to monitor/debug, fault-tolerant, and highly available architecture. Scorpio Broker based on NGSI-LD offers a unique feature of Link data context that provides self-contained (or referenced) dynamic schema definition (i.e. the context) for contained data in each message/entity. Thus allows the Scorpio Broker core processing to still remain unified even it gets dynamic context-driven data as its input from different types of data sources coupled(or designed for) with different schemas. + +.. figure:: figures/architecture.png + +The basic architecture of the Scorpio Broker consists of five layers, the first layer consists of the Scorpio Broker clients which act as the producers and consumers. The second layer act as an interface between the Scorpio Broker and the external world this layer comprises the NGSI-LD Compliant Interfaces, Service API Gateway, and Service Discovery & Registration. The third layer contains all the micro-services and is responsible for the majority of tasks like entity CRUD operations etc. The fourth layer acts as the interface which connects different micro-services from the storage. The fifth layer is a Resources layer which acts as the storage for Scorpio Broker. + +****************** +System Setup Guide +****************** + +In order to set-up the environment of Scorpio broker, the following dependency needs to be configured:- + +1. Server JDK. +2. Apache Kafka. +3. PostgreSQL + + +Windows +####### + +JDK Setup +********* + +- Start the JDK installation and hit the “Change destination folder†checkbox, then click 'Install.' + +**Note:-** Recommended version is JDK-11. Scorpio Broker is developed and tested with this version only. + +.. 
figure:: figures/jdk-1.png + +- Change the installation directory to any path without spaces in the folder name. + + +After you've installed Java in Windows, you must set the JAVA_HOME environment variable to point to the Java installation directory. + +**Set the JAVA_HOME Variable** + +To set the JAVA_HOME variable: + +1. Find out where Java is installed. If you didn't change the path during installation, it will be something like this: + + *C:\Program Files\Java\jdk1.version* + +2. - In Windows 7 right-click **My Computer** and select **Properties** > **Advanced**. + + OR + + - In Windows 8 go to **Control Panel** > **System** > **Advanced System Settings**. + +3. Click the Environment Variables button. + +4. Under System Variables, click New. + +5. In the User Variable Name field, enter: **JAVA_HOME** + +6. In the User Variable Value field, enter your JDK path. + + (Java path and version may change according to the version of Kafka you are using) + +7. Now click OK. + +8. Search for a Path variable in the “System Variable†section in the “Environment Variables†dialogue box you just opened. + +9. Edit the path and type *;%JAVA_HOME%\bin* at the end of the text already written there, just like the image below: + +.. figure:: figures/jdk-3.png + + +- To confirm the Java installation, just open cmd and type “java –version.†You should be able to see the version of Java you just installed. + +.. figure:: figures/jdk-4.png + +If your command prompt somewhat looks like the image above, you are good to go. Otherwise, you need to recheck whether your setup version matches the correct OS architecture (x86, x64), or if the environment variables path is correct. + + +Setting Up Kafka +**************** + +1. Go to your Kafka config directory. For example:- **C:\kafka_2.12-2.1.0\config** +2. Edit the file “server.properties.†+3. Find and edit the line log.dirs=/tmp/kafka-logs†to “log.dir= C:\kafka_2.11-0.9.0.0\kafka-logs. +4. If your ZooKeeper is running on some other machine or cluster you can edit “zookeeper.connect:2181†to your custom IP and port. For this demo, we are using the same machine so there's no need to change. Also the Kafka port and broker.id are configurable in this file. Leave other settings as is. +5. Your Kafka will run on default port 9092 and connect to ZooKeeper’s default port, 2181. + +**Note**: For running Kafka, zookeepers should run first. At the time of closing Kafka, zookeeper should be closed first than Kafka. Recommended version of kafka is kafka_2.12-2.1.0. + + +Running a Kafka Server +********************** + +Important: Please ensure that your ZooKeeper instance is up and running before starting a Kafka server. + +1. Go to your Kafka installation directory:** C:\kafka_2.11-0.9.0.0\** +2. Open a command prompt here by pressing Shift + right-click and choose the “Open command window here†option). +3. Now type **.\bin\windows\kafka-server-start.bat .\config\server.properties** and press Enter,then +4. Type **.\bin\windows\kafka-server-start.bat .\config\server.properties** in new command window and hit enter. + +Setting up PostgreSQL +********************* + +Step 1) Go to https://www.postgresql.org/download. + +**Note**: Recommended version is postgres 10. + +Step 2) You are given two options:- + + 1. Interactive Installer by EnterpriseDB + 2. Graphical Installer by BigSQL + +BigSQL currently installs pgAdmin version 3 which is deprecated. It's best to choose EnterpriseDB which installs the latest version 4 + + +Step 3) + + 1. 
You will be prompted to the desired Postgre version and operating system. Select the Postgres 10, as Scorpio has been tested and developed with this version. + + 2. Click the Download Button, Download will begin + +Step 4) Open the downloaded .exe and Click next on the install welcome screen. + + +Step 5) + + 1. Change the Installation directory if required, else leave it to default + + 2.Click Next + + +Step 6) + + 1. You can choose the components you want to install in your system. You may uncheck Stack Builder + + 2. Click on Next + + +Step 7) + + 1. You can change the data location + + 2.Click Next + + +Step 8) + + 1. Enter the superuser password. Make a note of it + + 2.Click Next + + +Step 9) + + 1. Leave the port number as the default + + 2.Click Next + + +Step 10) + + 1. Check the pre-installation summary. + + 2.Click Next + +Step 11) Click the next button + +Step 12) Once install is complete you will see the Stack Builder prompt + + 1. Uncheck that option. We will use Stack Builder in more advance tutorials + + 2.Click Finish + +Step 13) To launch Postgre go to Start Menu and search pgAdmin 4 + +Step 14) You will see pgAdmin homepage + +Step 15) Click on Servers > Postgre SQL 10 in the left tree + +.. figure:: figures/dbconfig-1.png + +Step 16) + + 1.Enter superuser password set during installation + + 2. Click OK + +Step 17) You will see the Dashboard + +.. figure:: figures/dbconfig-2.png + +That's it to Postgre SQL installation. + +Linux +##### + +JDK Setup +********* + +To create a Java environment in your machine install the JDK, for this open the terminal, and run the following commands:- + +1. sudo apt-get update + +2. sudo apt-get install openjdk-8-jdk + +To check that JDK is properly installed in your machine, run the command **java -version** in your terminal if it returns the version of the JDK as 11 then it's working fine. + +.. figure:: figures/javaTerminal + +Setting Up Kafka +**************** + +To download the Apache Kafka in your machine run the following commands one by one in your terminal. + +1. mkdir kafka +2. cd kafka +3. wget https://archive.apache.org/dist/kafka/2.2.0/kafka_2.12-2.2.0.tgz +4. tar -xzf kafka_2.12-2.2.0.tgz + +Once the Kafka is downloaded in your machine hit the following commands to get it run + +1. kafka_2.12-2.2.0/bin/zookeeper-server-start.sh kafka_2.12-2.2.0/config/zookeeper.properties > /dev/null 2>&1 & +2. kafka_2.12-2.2.0/bin/kafka-server-start.sh kafka_2.12-2.2.0/config/server.properties > /dev/null 2>&1 & + +Setting up PostgreSQL +********************* + +In order to download the PostgreSQL in your machine run the following commands from your terminal. + +1. sudo apt update +2. sudo apt-get install postgresql-10 +3. service postgresql status + +The last command will give us the status of the PostgreSQL four your machine if this matches to +one in the picture then everything is properly installed else re-run the commands. +.. figure:: figures/postgresTerminal + +Once PostgreSQL is successfully installed in your machine create the database **ngb** and change its role by running the following commands: + +1. psql -U postgres -c "create database ngb;" +2. psql -U postgres -c "create user ngb with password 'ngb';" +3. psql -U postgres -c "alter database ngb owner to ngb;" +4. psql -U postgres -c "grant all privileges on database ngb to ngb;" +5. psql -U postgres -c "alter role ngb superuser;" +6. sudo apt install postgresql-10-postgis-2.4 +7. sudo apt install postgresql-10-postgis-scripts +8. 
sudo -u postgres psql -U postgres -c "create extension postgis; + +After this your PostgreSql is ready to use for Scorpio Boker. diff --git a/scorpio-broker/docs/en/source/relationships-linked-data.md b/scorpio-broker/docs/en/source/relationships-linked-data.md new file mode 100644 index 0000000000000000000000000000000000000000..bfe0a74e810e8a8a2c3c54de95425487d6a931bc --- /dev/null +++ b/scorpio-broker/docs/en/source/relationships-linked-data.md @@ -0,0 +1,951 @@ +[![FIWARE Core Context Management](https://nexus.lab.fiware.org/repository/raw/public/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/core/README.md) +[![NGSI LD](https://img.shields.io/badge/NGSI-LD-d6604d.svg)](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf) +[![JSON LD](https://img.shields.io/badge/JSON--LD-1.1-f06f38.svg)](https://w3c.github.io/json-ld-syntax/) + +**Description:** This tutorial discusses relationships between linked data entities and how the concepts of **JSON-LD** +and **NGSI-LD** can be used to interrogate entities and navigate from one entity to another. The tutorial discusses a +series of simple linked-data data models based around the supermarket chain’s store finder application, and demonstrates +how to design models holding one-to-one, one-to-many and many-to-many relationships. + +The tutorial uses [cUrl](https://ec.haxx.se/) commands throughout, but is also available as +[Postman documentation](https://fiware.github.io/tutorials.Relationships-Linked-Data/) + +[![Run in Postman](https://run.pstmn.io/button.svg)](https://github.com/ScorpioBroker/ScorpioBroker/blob/feature-80-temp/docs/en/source/Payloads/FIWARE%20Relationships%20using%20Linked%20Data.postman_collection.json) + +
+ +# Relationships in Linked Data + +> “It’s hard to communicate anything exactly and that’s why perfect relationships between people are difficult to find.†+> +> ― Gustave Flaubert, L'Éducation sentimentale + +All NGSI data entity attributes can be divided into one of two types. + +- _Property_ attributes +- _Relationship_ attributes + +For each entity, the _Property_ attributes (including various subtypes such as _GeoProperty_ , _TemporalProperty_ and +time values) define the current state something in the real world. As the state of the entity changes the `value` of +each _Property_ is updated to align with the last real world reading of the attribute. All _Property_ attributes relate +to the state of a single entity. + +_Relationship_ attributes correspond to the interactions **between** entities (which are expected to change over time). +They effectively provide the graph linking the nodes of the data entities together. Each _Relationship_ attribute holds +an `object` in the form of a URN - effectively a pointer to another object. _Relationship_ attributes do not hold data +themselves. + +Both properties and relationships may in turn have a linked embedded structure (of _properties-of-properties_ or +_properties-of-relationships or relationships-of-properties_ or _relationships-of-relationships_ etc.) which lead a full +complex knowledge graph. + +## Designing Data Models using JSON-LD + +In order for computers to be able to navigate linked data structures, proper ontologically correct data models must be +created and a full `@context` must be defined and made accessible. We can do this by reviewing and updating the existing +data models from the NGSI v2 [Entity Relationships](https://github.com/FIWARE/tutorials.Entity-Relationships) tutorial. + +### Revision: Data Models for a Stock management system as defined using NGSI-v2 + +As a reminder, four types of entity were created in the NGSI v2 stock management system. The relationship between the +four NGSI v2 entity models was defined as shown below: + +![](https://fiware.github.io/tutorials.Relationships-Linked-Data/img/entities-v2.png) + +More details can be found in the NGSI v2 [Entity Relationships](entity-relationships.md) tutorial. + +In NGSI v2 relationship attributes are just standard properties attributes. By convention NGSI v2 relationship +attributes are given names starting `ref` and are defined using the `type="Relationship"`. However, this is merely +convention and may not be followed in all cases. There is no infallible mechanism for detecting which attributes are +associative relationships between entities. + +### Data Models for a Stock management system defined using NGSI-LD + +The richer [JSON-LD](https://json-ld.org/spec/FCGS/json-ld/20130328) description language is able to define NGSI-LD +entities by linking entities directly as shown below. + +![](https://fiware.github.io/tutorials.Relationships-Linked-Data/img/entities-ld.png) + +The complete data model must be understandable by both developers and machines. + +- A full Human readable definition of this data model can be found + [online](https://fiware.github.io/tutorials.Step-by-Step/schema). +- The machine readable JSON-LD definition can be found at + [`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld) - + this file will be used to provide the `@context` to power our NGSI-LD data entities. 
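In practice, this `@context` file is attached to NGSI-LD requests via the HTTP `Link` header rather than being embedded
in every payload. A minimal sketch of such a request is shown below; the broker endpoint and the `Building` short name
follow the conventions used throughout this tutorial.

```bash
# Sketch: supplying the tutorial @context via the Link header so that short names
# such as type=Building can be expanded to their fully qualified names.
curl -G -X GET \
    'http://localhost:9090/ngsi-ld/v1/entities' \
    -H 'Link: <https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \
    -H 'Accept: application/ld+json' \
    -d 'type=Building' \
    -d 'options=keyValues'
```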
+ +Four data models have been created for this NGSI-LD stock management system. The relationships between the models are +described below: + +- The [**Store** model](https://fiware.github.io/tutorials.Step-by-Step/schema/Store/) is now based on and extends the + FIWARE + [**Building** model](https://fiware-datamodels.readthedocs.io/en/latest/Building/Building/doc/spec/index.html). This + ensures that it offers standard properties for `name`, `address` and category. + - A Building will hold `furniture` this is a 1-many relationship. + - Building âž¡ Shelf. +- The [**Shelf** model](https://fiware.github.io/tutorials.Step-by-Step/schema/Shelf/) is a custom data model defined + for the tutorial + - Each **Shelf** is `locatedIn` a **Building**. This is a 1-1 relationship. It is the reciprocal relationship to + `furniture` defined above. + - Shelf âž¡ Building. + - A **Shelf** is `installedBy` a **Person** - this is a 1-1 relationship. A shelf knows who installed it, but it + is this knowledge is not part of the Person entity itself. + - Shelf âž¡ Person + - A **Shelf** `stocks` a given **Product**. This is another 1-1 relationship, and again it is not reciprocated. A + **Product** does not know which **Shelf** it is to be found on. + - Shelf âž¡ Product +- A [**StockOrder** model](https://fiware.github.io/tutorials.Step-by-Step/schema/StockOrder/) replaces the + **Inventory Item** bridge table defined for NGSI v2 : + - A **StockOrder** is `requestedBy` a **Person** - this is a 1-1 relationship. + - StockOrder âž¡ Person. + - A **StockOrder** is `requestedFor` a **Building** - this is a 1-1 relationship. + - StockOrder âž¡ Building. + - A **StockOrder** is a request for a specific `orderedProduct` - this is a 1-1 relationship. + - StockOrder âž¡ Product. +- The [**Product** model](https://fiware.github.io/tutorials.Step-by-Step/schema/Product/) remains unchanged. It has + no relationships of its own. + +Additionally some relationships have been defined to be linked to `https://schema.org/Person` entities. This could be +outlinks to a separate HR system for example. + +## Comparison between Linked and Non-Linked Data Systems + +Obviously within a single isolated Smart System itself, it makes no difference whether a rich, complex linked-data +architecture is used or a simpler, non-linked-data system is created. However if the data is designed to be shared, then +linked data is a requirement to avoid data silos. An external system is unable to "know" what relationships are unless +they have been provided in a machine readable form. + +### Video: Rich Snippets: Product Search + +A simple example of an external system interrogating for structured data can be found in online product search. Machines +from third parties such as Google are able to read product information (encoded using a standard +[**Product** data model](https://jsonld.com/product/)) and display a rich snippet of product information with a standard +star rating. + +[![](https://fiware.github.io/tutorials.Step-by-Step/img/video-logo.png)](https://www.youtube.com/watch?v=_-rRxKSm2ic "Rich Snippets") + +Click on the image above to watch an introductory video on rich snippets for product search. + +Further machine readable data model examples can be found on the [Steal Our JSON-LD](https://jsonld.com/) site. + +## Traversing relationships + +> **Example**: Imagine the scenario where a pallet of Products are moved from stock in the warehouse (`stockCount`) onto +> the shelves of the store (`storeCount`) . 
How would NGSI v2 and NGSI-LD computations differ?

### Relationships without Linked Data

Without linked data, there is no machine readable way to connect entities together. Every data relationship must be
known in advance somehow. Within an isolated Smart System this is not an issue, since the architect of the system will
know in advance _what-connects-to-what_.

For example, in the simple NGSI v2 Entity Relationships tutorial, a convenience bridge table **InventoryItem** entity
had been created specifically to hold both the count on the shelf and the count in the warehouse in a single entity. In
any computation only the **InventoryItem** entity would be involved: the `stockCount` value would be decremented and
the `shelfCount` value would be incremented. In the NGSI v2 model both the `storeCount` and the `shelfCount` have been
placed into the conceptual **InventoryItem** entity. This is a necessary workaround for NGSI v2 and it allows for
simpler data reading and data manipulation. However, technically it is ontologically incorrect, as there is no such
thing as an **InventoryItem** in the real world; there are really two separate ledgers, products bought for the store
and products sold on the shelf, which in turn have an indirect relationship.

Since the entity data is not yet machine readable externally, the programmer is free to design models as she sees fit
and can decide to update two attributes of one **InventoryItem** entity or two separate attributes on two separate
**Shelf** and **StockOrder** entities, without regard as to whether these really are concrete items in the real world.
However, this means **external systems** cannot discover information for themselves and must be pre-programmed to know
where information is held.

### Relationships with Linked Data

With a well defined data model using linked data, every relationship can be predefined in advance and is discoverable.
Using [JSON-LD](https://json-ld.org/spec/FCGS/json-ld/20130328) concepts (specifically `@graph` and `@context`) it is
much easier for computers to understand indirect relationships and navigate between linked entities. Due to these
additional annotations it is possible to create usable models which are ontologically correct, and therefore **Shelf**
can now be directly assigned a `numberOfItems` attribute and the bridge table concept is no longer required. This is
necessary as other systems may be interrogating **Shelf** directly.

Similarly, a real **StockOrder** entity can be created which holds an entry of which items are currently on order for
each store. This is a proper context data entity, as `stockCount` describes the current state of a product in the
warehouse. Once again this describes a single, real world entity and is ontologically correct.

Unlike the NGSI v2 scenario, with linked data it would be possible for an **external system** to discover relationships
and interrogate our supermarket. Imagine, for example, an
[Autonomous Mobile Robot](https://www.intorobotics.com/40-excellent-autonomous-mobile-robots-on-wheels-that-you-can-build-at-home/)
system which is used to move a pallet of products onto a shelf. It would be possible for this **external system** to
"know" about our supermarket by navigating the relationships in the linked data `@graph` from **StockOrder** to
**Shelf** as shown:

- Some `product:XXX` items have been removed from `stockOrder:0001` - decrement `stockCount`.
+- Interogating the **StockOrder** is discovered that the **Product** is `requestedFor` for a specific URI e.g. + `store:002` + +```json + "@graph": [ + { + "@id": "tutorial:orderedProduct", + "@type": "https://uri.etsi.org/ngsi-ld/Relationship", + "schema:domainIncludes": [{"@id": "tutorial:StockOrder"}], + "schema:rangeIncludes": [{"@id": "tutorial:Product"}], + "rdfs:comment": "The Product ordered for a store", + "rdfs:label": "orderedProduct" + }, + ...etc +] +``` + +- It is also discovered from the **StockOrder** model that the `requestedFor` URI defines a **Building** + +```json + "@graph": [ + { + "@id": "tutorial:requestedFor", + "@type": "https://uri.etsi.org/ngsi-ld/Relationship", + "schema:domainIncludes": [{"@id": "tutorial:StockOrder"}], + "schema:rangeIncludes": [{"@id": "fiware:Building"}], + "rdfs:comment": "Store for which an item is requested", + "rdfs:label": "requestedFor" + }, + ...etc +] +``` + +- It is discovered from the **Building** model that every **Building** contains `furniture` as an array of URIs. +- It is discovered from the **Building** model that these URIs represent **Shelf** units + +```json +"@graph": [ + { + "@id": "tutorial:furniture", + "@type": "https://uri.etsi.org/ngsi-ld/Relationship", + "schema:domainIncludes": [{"@id": "fiware:Building"}], + "schema:rangeIncludes": [{"@id": "tutorial:Shelf"}], + "rdfs:comment": "Units found within a Building", + "rdfs:label": "furniture" + }, + ...etc +] +``` + +- It is discovered from the **Shelf** model that the `stocks` attribute holds a URI representing **Product** items. + +```json +"@graph": [ + { + "@id": "tutorial:stocks", + "@type": "https://uri.etsi.org/ngsi-ld/Relationship", + "schema:domainIncludes": [{"@id": "tutorial:Shelf"}], + "schema:rangeIncludes": [{"@id": "tutorial:Product"}], + "rdfs:comment": "The product found on a shelf", + "rdfs:label": "stocks" + }, + ...etc +] +``` + +- A request the **Shelf** unit which holds the correct **Product** for the `stocks` attribute is made and the Shelf + `numberOfItems` attribute can be incremented. + +Through creating and using standard data models and describing the linked data properly, it would not matter to the +robot if the underlying system were to change, provided that the Properties and Relationships resolve to fully qualified +names (FQNs) and a complete `@graph`. For example the JSON short name attributes could be amended or the relationships +redesigned but their real intent (which resolves to a fixed FQN) could still be discovered and used. + +# Architecture + +Our demo application will only make use of one FIWARE component - the +[Scorpio Broker](https://scorpio.readthedocs.io/en/latest/). Usage of the Scorpio Context Broker (with proper +context data flowing through it) is sufficient for an application to qualify as _“Powered by FIWAREâ€_. + +![Deployment Architecture](figures/deploymentarchitecture.png) + +The deployment architecture leverages the Spring Cloud framework that addresses lots of Micro-services concerns (e.g. scaling, monitoring, fault-tolerant, highly available, secure, decoupled, etc. ) and Kafka based distributed and scalable message queue infrastructure to provide high performance on message processing for a huge number of context requests which is usual in the IoT domain. +The deployment architecture covers the high-level operations (Http based REST with method POST/GET/DELETE/PATCH) request flow from the external world to the Scorpio Broker system. 
The external request is served through a unified service API gateway interface that exposes a single IP/port combination for all services that the Scorpio Broker system provides.
In reality, each of the Scorpio Broker services is implemented as a micro-service that can be deployed as an independent, standalone unit in a distributed computing environment.

The necessary configuration information can be seen in the services section of the associated `docker-compose.yml` file:
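As an illustrative sketch only - the service names, images and tags below are placeholders rather than the actual
contents of that file - such a services section typically wires the broker to Kafka and PostgreSQL and exposes the
NGSI-LD API on port 9090, as used by the requests in this tutorial:

```yaml
# Illustrative sketch only: image names and tags are placeholders, not the
# repository's actual docker-compose.yml.
services:
  zookeeper:
    image: zookeeper
  kafka:
    image: wurstmeister/kafka      # placeholder Kafka image
    depends_on:
      - zookeeper
  postgres:
    image: mdillon/postgis         # placeholder PostGIS-enabled PostgreSQL image
    environment:
      POSTGRES_USER: ngb
      POSTGRES_PASSWORD: ngb
      POSTGRES_DB: ngb
  scorpio:
    image: scorpiobroker/scorpio   # placeholder all-in-one broker image
    ports:
      - "9090:9090"
    depends_on:
      - kafka
      - postgres
```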

# Creating and Associating Data Entities

+ +## Reviewing existing entities + +On start up, the system is brought up with a series of **Building**, **Product** and **Shelf** entities already present. +You can query for them using the requests below. In each case only the _Properties_ of the entities have been created. + +To avoid ambiguity, computers prefer to use unique IDs when referring to well defined concepts. For each of the NGSI-LD +entities returned, the names of the attributes received can be defined as either as a fully qualified name (FQN) or as +simple JSON attributes dependent upon whether the associated `Link` header connecting the NGSI-LD Data Entity to the +computer readable JSON-LD `@context` Data Models is included in the request. + +### Display all Buildings + +The Stores of the supermarket have been created using the FIWARE +[**Building** model](https://fiware-datamodels.readthedocs.io/en/latest/Building/Building/doc/spec/index.html) and the +enumerated value of this type is `fiware:Building` which expands to `https://uri.fiware.org/ns/datamodels%23Building`. +It is therefore possible to request all building entities without supplying a known context. + +#### 1 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -d 'type=https://uri.fiware.org/ns/datamodels%23Building&options=keyValues' +``` + +#### Response: + +The response returns all of the existing **Building** entities, with the attributes expanded as fully qualified names +(FQNs). + +```json +[ + { + "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + "id": "urn:ngsi-ld:Building:store001", + "type": "https://uri.fiware.org/ns/datamodels#Building", + "name": "Bösebrücke Einkauf", + "https://schema.org/address": { + "streetAddress": "Bornholmer Straße 65", + "addressRegion": "Berlin", + "addressLocality": "Prenzlauer Berg", + "postalCode": "10439" + }, + "https://uri.fiware.org/ns/datamodels#category": ["commercial"], + "location": { + "type": "Point", "coordinates": [13.3986, 52.5547] + } + }, + { + "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + "id": "urn:ngsi-ld:Building:store002", + "type": "https://uri.fiware.org/ns/datamodels#Building", + "name": "Checkpoint Markt", + "https://schema.org/address": { + "streetAddress": "Friedrichstraße 44", + "addressRegion": "Berlin", + "addressLocality": "Kreuzberg", + "postalCode": "10969" + }, + "https://uri.fiware.org/ns/datamodels#category": ["commercial"], + "location": { + "type": "Point", "coordinates": [13.3903, 52.5075] + } + }, + ... etc +``` + +According to the [defined data model](https://fiware.github.io/tutorials.Step-by-Step/schema/Store/): + +- The `type` attribute has the FQN `https://uri.etsi.org/ngsi-ld/type` +- The `name` attribute has the FQN `https://uri.etsi.org/ngsi-ld/name` +- The `location` attribute has the FQN `https://uri.etsi.org/ngsi-ld/location` +- The `address` attribute has the FQN `http://schema.org/address` +- The `category` attribute has the FQN `https://uri.fiware.org/ns/datamodels#category` + +`type`, `name` and `location` are defined in the NGSI-LD Core Context: +[`https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld`](https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld). +The other attributes are defined using the Tutorial's own Context: +[`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld). 
+Both `category` and `address` are _common_ attributes the definitions of which are brought in from the FIWARE data +models and `schema.org` respectively. + +### Display all Products + +Requesting the **Product** entities can be done by supplying the FQN of the entity `type` in the request as well. + +#### 2 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -d 'type=https://fiware.github.io/tutorials.Step-by-Step/schema/Product' \ + -d 'options=keyValues' \ + -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +However since the full context has been supplied in the `Link` header, the short names are returned. + +```json +[ + { + "@context": "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld", + "id": "urn:ngsi-ld:Product:001", + "type": "Product", + "name": "Beer", + "price": 0.99, + "size": "S" + }, + { + "@context": "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld", + "id": "urn:ngsi-ld:Product:002", + "type": "Product", + "name": "Red Wine", + "price": 10.99, + "size": "M" + }, + .. etc +``` + +According to the [defined data model](https://fiware.github.io/tutorials.Step-by-Step/schema/Product/): + +- The `type` attribute has the FQN `https://uri.etsi.org/ngsi-ld/type` +- The `name` attribute has the FQN `https://uri.etsi.org/ngsi-ld/name` +- The `price` attribute has the FQN `https://fiware.github.io/tutorials.Step-by-Step/schema/price` +- The `size` attribute has the FQN `https://fiware.github.io/tutorials.Step-by-Step/schema/size` +- The `currency` attribute has the FQN `https://fiware.github.io/tutorials.Step-by-Step/schema/currency` + +The programmatically the Product model and its attributes are fully described in the +[`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld) + +### Display all Shelves + +Requesting the **Product** entities can be done by supplying the short of the entity `type` in the request as well, +provided the full context has been supplied in the `Link` header. + +#### 3 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities' \ + -d 'type=Shelf' \ + -d 'options=keyValues' \ + -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +Once again the short names are returned. + +```json +[ + { + "@context": "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld", + "id": "urn:ngsi-ld:Shelf:unit001", + "type": "Shelf", + "name": "Corner Unit", + "maxCapacity": 50, + "location": { + "type": "Point", "coordinates": [13.398611, 52.554699] + } + }, + { + "@context": "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld", + "id": "urn:ngsi-ld:Shelf:unit002", + "type": "Shelf", + "name": "Wall Unit 1", + "maxCapacity": 100, + "location": { + "type": "Point", "coordinates": [13.398722, 52.554664] + } + }, + ... 
etc +``` + +According to the [defined data model](https://fiware.github.io/tutorials.Step-by-Step/schema/Shelf/): + +- The `type` attribute has the FQN `https://uri.etsi.org/ngsi-ld/type` +- The `name` attribute has the FQN `https://uri.etsi.org/ngsi-ld/name` +- The `location` attribute has the FQN `https://uri.etsi.org/ngsi-ld/location` +- The `maxCapacity` attribute has the FQN `https://fiware.github.io/tutorials.Step-by-Step/schema/maxCapacity` +- The `numberOfItems` attribute has the FQN `https://fiware.github.io/tutorials.Step-by-Step/schema/numberOfItems` + +The programmatically the Shelf model and its attributes are fully described in the +[`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`](https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld) + +### Obtain Shelf Information + +Initially each shelf is created with `name`, `maxCapacity` and `location` _Properties_ only. A sample shelf is requested +below. + +#### 4 Request: + +```bash +curl -G -X GET \ + 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/' \ + -d 'options=keyValues' \ + -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +The short names have been returned since the `@context` has been supplied in the `Link` header. + +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld", + "id": "urn:ngsi-ld:Shelf:unit001", + "type": "Shelf", + "name": "Corner Unit", + "maxCapacity": 50, + "location": { + "type": "Point", + "coordinates": [13.398611, 52.554699] + } +} +``` + +## Creating Relationships + +To complete the data model within the data model, various additional _Properties_ and _Relationships_ need to be added +to the entity. + +A **Shelf** holds a `numberOfItems` - this is a `Property` of the **Shelf** and contains a `value` representing the +number of Items. The `value` of this _Property_ (i.e. the number of Items will change over time). _Properties_ have been +covered in a [previous tutorial](linked-data.md) and will not be covered in detail here. + +A **Shelf** `stocks` a given **Product** - this is a `Relationship` of the **Shelf** Only the URN of the product is +known by the **Shelf** entity - effectively it points to further information held elsewhere. + +To distinguish _Relationships_, they must be given `type="Relationship"` and each _Relationship_ has must have an +`object` sub-attribute, this contrasts with _Properties_ which must a `type="Property"` have a `value` attribute. The +`object` sub-attribute holds the reference to the related entity in the form of a URN. + +A **Shelf** is `locatedIn` a given **Building**. Once again this is a `Relationship` of the **Shelf**. The URN of the +**Building** is known by the **Shelf** entity, but further information is also available: + +- `locatedIn[requestedBy]` is a _Relationship-of-a-Relationship_, this sub-attribute in turn holds an `object` + attribute of its own pointing to a **Person** +- `locatedIn[installedBy]` is a _Relationship-of-a-Relationship_, this sub-attribute in turn holds an `object` + attribute of its own pointing to a **Person** +- `locatedIn[statusOfWork]` is a _Property-of-a-Relationship_, this sub-attribute in turn holds an `value` attribute + holding the current status of the `locatedIn` action. 
+ +As you can see, it is possible to embed further _Properties_ (with a corresponding `value`) or _Relationships_ (with a +corresponding `object`) inside the entity structure to provide a rich graph of information + +### Adding 1-1 Relationships + +Within the `@context` a **Shelf** has been predefined with two relationships. (`stocks` and `locatedIn`) + +To create a relationship add a new attribute with `type=Relationship` and an associated object attribute. Metadat about +the relationships (e.g. `requestedBy`, `installedBy`)can be created by adding subattributes to the relationship. The +value of object is the URN corresponding to the linked data entity. + +Note that the relationship is currently unidirectional. **Shelf** ➡ **Building**. + +#### 5 Request: + +```bash +curl -X POST \ + http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/attrs \ + -H 'Content-Type: application/ld+json' \ + -H 'fiware-servicepath: /' \ + -d '{ + "numberOfItems": {"type": "Property","value": 50}, + "stocks": { + "type": "Relationship", + "object": "urn:ngsi-ld:Product:001" + }, + "locatedIn" : { + "type": "Relationship", "object": "urn:ngsi-ld:Building:store001", + "requestedBy": { + "type": "Relationship", + "object": "urn:ngsi-ld:Person:bob-the-manager" + }, + "installedBy": { + "type": "Relationship", + "object": "urn:ngsi-ld:Person:employee001" + }, + "statusOfWork": { + "type": "Property", + "value": "completed" + } + }, + "@context": [ + "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" + ] +}' +``` + +### Obtain the Updated Shelf + +Having added the additional attributes, it is possible to query for the amended entity. + +This example returns the context data of the Shelf entity with the `id=urn:ngsi-ld:Shelf:unit001`. + +#### 6 Request: + +```bash +curl -X GET \ + http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001 +``` + +#### Response: + +There are now two additional relationship attributes present `stocks` and `locatedIn`. Both entries have been expanded +as fully qualified names (FQNs), as defined in the +[**Shelf** Data Model](https://fiware.github.io/tutorials.Step-by-Step/schema/Shelf/) as the `Link` header was not +passed in the previous request. 
+ +```json +{ + "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld", + "id": "urn:ngsi-ld:Shelf:unit001", + "type": "https://fiware.github.io/tutorials.Step-by-Step/schema/Shelf", + "name": { + "type": "Property", + "value": "Corner Unit" + }, + "https://fiware.github.io/tutorials.Step-by-Step/schema/locatedIn": { + "type": "Relationship", + "object": "urn:ngsi-ld:Building:store001", + "installedBy": { + "type": "Relationship", + "object": "urn:ngsi-ld:Person:employee001" + }, + "requestedBy": { + "type": "Relationship", + "object": "urn:ngsi-ld:Person:bob-the-manager" + }, + "statusOfWork": { + "type": "Property", + "value": "completed" + } + }, + "https://fiware.github.io/tutorials.Step-by-Step/schema/maxCapacity": { + "type": "Property", + "value": 50 + }, + "https://fiware.github.io/tutorials.Step-by-Step/schema/numberOfItems": { + "type": "Property", + "value": 50 + }, + "https://fiware.github.io/tutorials.Step-by-Step/schema/stocks": { + "type": "Relationship", + "object": "urn:ngsi-ld:Product:001" + }, + "location": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [13.398611, 52.554699] + } + } +} +``` + +For example, this means that `https://fiware.github.io/tutorials.Step-by-Step/schema/locatedIn` is a well-defined +relationship within our linked data JSON-LD schema. + +### How is the relationship's Fully Qualified Name created ? + +One of the central motivations of JSON-LD is making it easy to translate between different representations of what are +fundamentally the same data types. In this case, the short hand `locatedIn` refers to the unique and computer readable +`https://fiware.github.io/tutorials.Step-by-Step/schema/locatedIn` + +To do this NGSI-LD uses the two core expansion and compaction algorithms of the underlying JSON-LD model. + +Looking at the relevant lines in the JSON-LD `@context`: + +```json + "tutorial": "https://fiware.github.io/tutorials.Step-by-Step/schema/", + + "Shelf": "tutorial:Shelf", + + "locatedIn": { + "@id": "tutorial:locatedIn", + "@type": "@id" + }, +``` + +You can see that `tutorial` has been mapped to the string `https://fiware.github.io/tutorials.Step-by-Step/schema/` and +`locatedIn` has been mapped to `tutorial:locatedIn` which using + +Furthermore, `locatedIn` has an `@type="@id"` which indicates to a computer that its underlying value is a URN. + +### Video: JSON-LD Compaction & Expansion + +[![](https://fiware.github.io/tutorials.Step-by-Step/img/video-logo.png)](https://www.youtube.com/watch?v=Tm3fD89dqRE "JSON-LD Compaction & Expansion") + +Click on the image above to watch a video JSON-LD expansion and compaction with reference to the `@context`. + +### What other relationship information can be obtained from the data model? + +More information about `Relationships` can be obtained from the `@graph` of the linked data model. For `locatedIn` the +relevant section definition is as follows: + +```json + { + "@id": "tutorial:locatedIn", + "@type": "https://uri.etsi.org/ngsi-ld/Relationship", + "schema:domainIncludes": [{"@id": "tutorial:Shelf"}], + "schema:rangeIncludes": [{"@id": "fiware:Building"}], + "rdfs:comment": "Building in which an item is found", + "rdfs:label": "located In" + }, +``` + +This indicates a lot of additional information about the `locatedIn` _Relationship_ in a computer readable fashion: + +- `locatedIn` is really an NGSI-LD relationship (i.e. 
Through reading the NGSI-LD data entity and its associated data model, a computer can obtain as much information as a
human can from reading the human-readable equivalent data specification:

![](https://fiware.github.io/tutorials.Relationships-Linked-Data/img/shelf-specification.png)

### Find the store in which a specific shelf is located

This example returns the `locatedIn` value associated with a given `Shelf` unit.

If the `id` and `type` of a data entity are known, a specific field can be requested by using the `attrs` parameter.

#### 7 Request:

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/' \
  -d 'attrs=locatedIn' \
  -d 'options=keyValues' \
  -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"'
```

#### Response:

```json
{
    "@context": "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld",
    "id": "urn:ngsi-ld:Shelf:unit001",
    "type": "Shelf",
    "locatedIn": "urn:ngsi-ld:Building:store001"
}
```

### Find the IDs of all Shelf Units in a Store

This example returns the `id` and `locatedIn` values of all **Shelf** entities found within
`urn:ngsi-ld:Building:store001`. This is purely an instance of using the `q` parameter to filter on an attribute value.

#### 8 Request:

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/' \
  -d 'type=Shelf' \
  -d 'q=locatedIn==%22urn:ngsi-ld:Building:store001%22' \
  -d 'options=keyValues' \
  -d 'attrs=locatedIn' \
  -H 'Accept: application/json' \
  -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"'
```

#### Response:

The response contains an array displaying the `id`, `type` and `locatedIn` value of each matching **Shelf** entity.

```json
[
    {
        "id": "urn:ngsi-ld:Shelf:unit001",
        "type": "Shelf",
        "locatedIn": "urn:ngsi-ld:Building:store001"
    }
]
```

### Adding a 1-many relationship

To add a 1-many relationship, add an array as the value of the `object` attribute. This can be used for simple links
without additional data. This method is used to add **Shelf** entities as `furniture` in the **Store**.

This is the reciprocal relationship to the `locatedIn` attribute on **Shelf**.

#### 9 Request:

```bash
curl -X POST \
  http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/attrs \
  -H 'Content-Type: application/ld+json' \
  -d '{
    "furniture": {
        "type": "Relationship",
        "object": [ "urn:ngsi-ld:Shelf:001", "urn:ngsi-ld:Shelf:002"]
    },
    "@context": [
        "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld"
    ]
}'
```

### Finding all shelf units found within a Store

To find all the `furniture` within a **Building**, simply make a request to retrieve the `furniture` attribute.

Because the reciprocal relationship already exists, additional information can be obtained from the **Shelf** entities
themselves (a sketch of such a follow-up request is shown after the response below).

#### 10 Request:

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001' \
  -d 'options=keyValues' \
  -d 'attrs=furniture' \
  -H 'Accept: application/json' \
  -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"'
```

#### Response:

```json
{
    "id": "urn:ngsi-ld:Building:store001",
    "type": "Building",
    "furniture": ["urn:ngsi-ld:Shelf:001", "urn:ngsi-ld:Shelf:002"]
}
```
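Since the `furniture` array holds the URNs of the related **Shelf** entities, those entities can then be requested
directly. The following is just a minimal sketch of such a follow-up request, reusing the URNs returned above (the same
`Link` header as in the previous requests would normally be supplied as well):

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/' \
  -d 'type=Shelf' \
  -d 'id=urn:ngsi-ld:Shelf:001,urn:ngsi-ld:Shelf:002' \
  -d 'options=keyValues' \
  -H 'Accept: application/json'
```

The `id` parameter simply takes a comma-separated list of entity ids, so the response is an array holding both **Shelf**
entities.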
### Creating Complex Relationships

To create a more complex relationship, an additional data entity must be created which holds the current state of the
links between real-world items. In the case of the NGSI-LD data model we have already created, a **StockOrder** can be
used to link **Product**, **Building** and **Person** entities and the state of the relationships between them. As well
as _Relationship_ attributes, a **StockOrder** can hold _Property_ attributes (such as the `stockCount`) and other more
complex metadata such as _Properties-of-Properties_ or _Properties-of-Relationships_.

The **StockOrder** is created as a standard NGSI-LD data entity.

#### 11 Request:

```bash
curl -X POST \
  http://localhost:9090/ngsi-ld/v1/entities/ \
  -H 'Content-Type: application/ld+json' \
  -d '{
    "id": "urn:ngsi-ld:StockOrder:001",
    "type": "StockOrder",
    "requestedFor": {
        "type": "Relationship",
        "object": "urn:ngsi-ld:Building:store001"
    },
    "requestedBy": {
        "type": "Relationship",
        "object": "urn:ngsi-ld:Person:bob-the-manager"
    },
    "orderedProduct": {
        "type": "Relationship",
        "object": "urn:ngsi-ld:Product:001"
    },
    "stockCount": {
        "type": "Property",
        "value": 10000
    },
    "orderDate": {
        "type": "Property",
        "value": {
            "@type": "DateTime",
            "@value": "2018-08-07T12:00:00Z"
        }
    },
    "@context": [
        "https://fiware.github.io/tutorials.Step-by-Step/datamodels-context.jsonld"
    ]
}'
```

### Find all stores in which a product is sold

Since _Relationship_ attributes are just like any other attribute, standard `q` parameter queries can be made on the
**StockOrder** to obtain which entity relates to it. For example, the query below returns an array of stores in which a
given product is sold.

The query `q=orderedProduct=="urn:ngsi-ld:Product:001"` is used to filter the entities.

#### 12 Request:

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/' \
  -d 'type=StockOrder' \
  -d 'q=orderedProduct==%22urn:ngsi-ld:Product:001%22' \
  -d 'attrs=requestedFor' \
  -d 'options=keyValues' \
  -H 'Accept: application/json' \
  -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"'
```

#### Response:

The response returns an array of `requestedFor` attributes from the matching **StockOrder** entities.

```json
[
    {
        "id": "urn:ngsi-ld:StockOrder:001",
        "type": "StockOrder",
        "requestedFor": "urn:ngsi-ld:Building:store001"
    }
]
```
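Note that the double quotes around the URN have to be percent-encoded as `%22` when the query is passed with `-d`. As a
purely optional variant, curl can apply the encoding for you; the sketch below (same endpoint as the request above, with
the `Link` header omitted for brevity) is equivalent:

```bash
curl -G -X GET 'http://localhost:9090/ngsi-ld/v1/entities/' \
  -d 'type=StockOrder' \
  --data-urlencode 'q=orderedProduct=="urn:ngsi-ld:Product:001"' \
  -d 'attrs=requestedFor' \
  -d 'options=keyValues' \
  -H 'Accept: application/json'
```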
### Find all products sold in a store

The query below returns an array of the products ordered for a given store.

The query `q=requestedFor=="urn:ngsi-ld:Building:store001"` is used to filter the entities.

#### 13 Request:

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/' \
  -d 'type=StockOrder' \
  -d 'q=requestedFor==%22urn:ngsi-ld:Building:store001%22' \
  -d 'options=keyValues' \
  -d 'attrs=orderedProduct' \
  -H 'Accept: application/json' \
  -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"'
```

#### Response:

The response returns an array of `orderedProduct` attributes. This is the reciprocal of the previous request.

```json
[
    {
        "id": "urn:ngsi-ld:StockOrder:001",
        "type": "StockOrder",
        "orderedProduct": "urn:ngsi-ld:Product:001"
    }
]
```

### Obtain Stock Order

A complete stock order can be obtained by making a standard GET request to the `/ngsi-ld/v1/entities/` endpoint and
adding the appropriate URN.

#### 14 Request:

```bash
curl -G -X GET \
  'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:StockOrder:001' \
  -d 'options=keyValues'
```

#### Response:

The response returns the fully expanded entity.

```json
{
    "@context": "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld",
    "id": "urn:ngsi-ld:StockOrder:001",
    "type": "https://fiware.github.io/tutorials.Step-by-Step/schema/StockOrder",
    "https://fiware.github.io/tutorials.Step-by-Step/schema/orderDate": {
        "@type": "DateTime",
        "@value": "2018-08-07T12:00:00Z"
    },
    "https://fiware.github.io/tutorials.Step-by-Step/schema/orderedProduct": "urn:ngsi-ld:Product:001",
    "https://fiware.github.io/tutorials.Step-by-Step/schema/requestedBy": "urn:ngsi-ld:Person:bob-the-manager",
    "https://fiware.github.io/tutorials.Step-by-Step/schema/requestedFor": "urn:ngsi-ld:Building:store001",
    "https://fiware.github.io/tutorials.Step-by-Step/schema/stockCount": 10000
}
```
diff --git a/scorpio-broker/docs/en/source/security.rst b/scorpio-broker/docs/en/source/security.rst
new file mode 100644
index 0000000000000000000000000000000000000000..8a5ba22ba87ded3e3e7ed5440c7dfa1a663c5b75
--- /dev/null
+++ b/scorpio-broker/docs/en/source/security.rst
@@ -0,0 +1,42 @@
*******************
Security in Scorpio
*******************

Security Architecture
#####################

The Scorpio Broker system will also be responsible for identity & authentication management security. This includes the
authentication & authorization of requests and users, and role-based access to protected resources within the Scorpio
Broker security realm.

A new Authentication & Authorization service, compliant with the OAuth2.0 specification, has been introduced to provide
application-layer security for all Scorpio Broker components & services.

.. figure:: figures/security.png


Security - Functional Request Flow
##################################

1. The browser/end user sends a request for a protected resource to the Scorpio Broker system using the API gateway REST interface.

2. The API gateway checks whether the security feature is enabled.

   a. If yes, it checks whether the request is already authenticated and has an existing session.

      - If it does not find any session, it forwards the request to the Authentication & Authorization service. Or

      - If it finds an existing session, it reuses that session for authentication and routes the request to the back-end resource service.

   b. If security is not enabled, it bypasses the security check and routes the request to the back-end resource service, which is responsible for rendering the resource for the given request.
3. When the request arrives at the Authentication & Authorization (Auth, for short) service, the service responds to the original requester, i.e. the user/browser, with a login form so that they can present their identity based on the credentials they have been issued to access the resource.

4. The user now submits the login form with their credentials to the Auth service. The Auth service validates the user credentials against its account details and, on successful login, responds with an auth code and the redirect URL at which the user can fetch the requested resource.

5. The user/browser now redirects to the redirect URL, which in our case is again the API gateway URL, passing along the auth_code received from the Auth service.

6. The API gateway again checks the session and finds the existing session context, but this time the request carries the auth_code, so the gateway uses that auth_code to request a token from the Auth service, acting as a client on the user's behalf. Based on the auth code, the Auth service recognizes that this is an already logged-in, validated user and returns an access token to the API gateway.

7. Upon receiving the token (within the same security session context), the API gateway relays/routes the request to the back-end resource service for the originally requested resource/operation.

8. The back-end resource service is also enabled with the security features (if not, an error will be thrown for the incoming secure request). It receives the request, reads the security context out of it, and validates it (based on the extracted information) with the Auth service to check whether this is a valid token/request with the given privileges. The Auth service responds, and the back-end service then decides whether the local security configuration and the Auth-service-based access permissions match.

9. If the access permissions/privileges match for the incoming request, the service responds with the requested resources to the user/browser. If the request does not meet the security criteria, it responds with an error message and the reason why access is denied.
\ No newline at end of file
diff --git a/scorpio-broker/docs/en/source/systemOverview.rst b/scorpio-broker/docs/en/source/systemOverview.rst
new file mode 100644
index 0000000000000000000000000000000000000000..5a166c21f0159aed4deacefb26cf84c8c999d8ec
--- /dev/null
+++ b/scorpio-broker/docs/en/source/systemOverview.rst
@@ -0,0 +1,37 @@
*****************************************
Architecture
*****************************************
The deployment architecture leverages the Spring Cloud framework, which addresses many micro-service concerns (e.g. scaling, monitoring, fault tolerance, high availability, security, decoupling, etc.), and a Kafka-based distributed and scalable message queue infrastructure, in order to provide high performance on message processing for the huge number of context requests that is usual in the IoT domain.

It covers the high-level request flow (HTTP-based REST with the methods POST/GET/DELETE/PATCH) from the external world to the Scorpio Broker system. The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide. In reality, each of the Scorpio Broker services has been implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment.
ThE API gateway routes all the incoming requests to the specific Micro-services with the help of THE registration & discovery service. Once the request reaches a micro-service based on the operation requirement it uses(pub/sub) Kafka topics (message queues) for real-time storage and for providing intercommunication among different micro-services (based on requirement) over message queues. + +.. figure:: figures/architecture.png + +- **Application**: End-user/domain applications leverage Scorpio Broker to provide the required information about IoT infrastructure. This application can query, subscribe, update context information to/from the Scorpio Broker as per their requirements. +- **Consumers**: These are the IoT entities or applications that consume the data of Scorpio Broker. +- **Producers**: These are the IoT entities, context source, or applications that produce the context data to the Scorpio Broker. +- **Service API Gateway**: This is the proxy gateway for the external world to access the internal services of the Scorpio Broker system exposed via REST-based HTTP interfaces. All internal Scorpio Broker related services can be accessed through this service gateway using its single IP & port (which are usually static) and extending the service name in the URL. Thus the user does not need to take care of (or learn or use) the IP and Port of every service which often changes dynamically. This makes life easier, especially in a case when multiple services (or micro-service) are running under one system. This is easily solved by the use of proxy gateway(i.e. service API gateway) for all the back-end services. +- **Rest Interface**: These are the HTTP based interfaces for the external entities/applications to consume in order to execute certain operations on Scorpio Broker. The external interface would be visible through the Service API gateway and internal interface mapping to each requested service would be discovered through the service registration & discovery module. +- **Service Discovery & Registration**: This component allows registration of any service (web service/micro-service) with it so that any client using discovery functionality of this component can determine the location of a service instance to which it wants to send requests. So in short, a service registry & discovery implements a database of services, their instances, and their locations. Service instances get registered with the service registry on startup and deregistered on shutdown. A client of the service, query the service registry, which discovers the available instances of a service. A service registry might also invoke a service instance’s health check API to verify that it is able to handle requests. +- **Entity Manager**: This component handles all entity related CRUD operations with the help of other components of the Scorpio Broker. +- **LD Context Resolver**: This component is responsible for expanding the NGSI-LD document based on the JSON-LD @context for further processing by the other components of the Scorpio Broker. +- **Subscription & Notification Manager**: This component is responsible for handling CRUD operations related to entities and/or csource subscription & notification. +- **Query Manager**: This component handles simple or complex queries (e.g. geo-query) to the Scorpio Broker. +- **Storage Manager**: This component is responsible for fetching data from the message broker and then transforming them into relevant schema format in order to persist in DB tables. 
Additionally, this manager also provides interfaces for complex queries to the DB e.g. Geo query or cross-domain entity context relationship queries. +- **Context Registry Manager**: This component is responsible for providing interfaces for CRUD operations of csource registration/query/ subscription. +- **Health Check & Monitoring**: This component is responsible for monitoring the health of running services & infrastructure. +- **Message Bus Handler**: Every module of the Scorpio Broker may need to communicate with the bus for the inter-module exchange of messages. This interface is provided by the message bus handler. +- **Storage Connectors**: The Scorpio Broker needs to store certain information in different DB formats. So storage connectors (using any type of message broker methodology) provide the way to connect to those storage systems (which may be present locally or remotely). For example, the entity information could be stored in/streamed to a different types of storage systems e.g. MySQL, PostgreSQL, Bigdata, etc. These connectors could also be implemented for storage resiliency purposes. +- **Context Registry Connector**: Scorpio Broker needs to communicate to the context registry in order to know about the registered context sources (brokers/providers) and the type of data model they support. The context registry connector allows the message broker mechanism to connect to the context registry that may be running locally or remotely in federated mode. +- **Storage**: This is the actual storage (e.g. Postgres/Postgis) where data is persisted. +- **Context Registry**: This is the component which is responsible for saving the registration of the context sources/producers. + +***************************************** +Deployment Architecture +***************************************** + +This section is covering the deployment architecture of the Scorpio Broker which is using different technologies stack.   + +.. figure:: figures/deploymentarchitecture.png + +The deployment architecture leverages the Spring Cloud framework that addresses lots of Micro-services concerns(e.g. scaling, monitoring, fault-tolerant, highly available, secure, decoupled, etc. ) and Kafka based distributed and scalable message queue infrastructure to provide high performance on message processing for a huge number of context requests which is usual in the IoT domain. The deployment architecture covers the high-level operations (Http based REST with method POST/GET/DELETE/PATCH) request flow from the external world to the Scorpio Broker system.  The external request is served through a unified service API gateway interface that exposes a single IP/port combination to be used for all services that the Scorpio Broker system can provide. In reality, each of the Scorpio Broker services will be implemented as a micro-service that can be deployed as an independent standalone unit in a distributed computing environment. That API gateway routes all the incoming requests to the specific Micro-services with the help of registration & discovery service. Once the request reaches at micro-service based on the operation requirement it uses(pub/sub) Kafka topics (message queues) for real-time storage and for providing intercommunication among different micro-services (based on requirement) over message queues. 
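As an illustration of the single entry point described above, the sketch below assumes the default setup used in the
rest of this documentation, in which the gateway is exposed on port 9090; the client only addresses the gateway, and the
routing to the responsible micro-service (e.g. the Entity Manager or Query Manager) is resolved internally via the
registration & discovery service.

.. code-block:: console

    # the client only needs to know the gateway address; which micro-service
    # instance eventually serves the request is resolved by service discovery
    curl http://localhost:9090/ngsi-ld/v1/entities/<entity-id> \
         -H 'Accept: application/ld+json'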
\ No newline at end of file
diff --git a/scorpio-broker/docs/en/source/testing.rst b/scorpio-broker/docs/en/source/testing.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f325228444e86d3f752f37081253d9704c71327e
--- /dev/null
+++ b/scorpio-broker/docs/en/source/testing.rst
@@ -0,0 +1,44 @@
Tests
=====

Scorpio has two sets of tests. We use JUnit for unit tests and the
FIWARE NGSI-LD Testsuite, which is npm-test based, for system tests.

Running unit tests
------------------

A lot of the logic within Scorpio is intertwined with Kafka, hence a lot
of the unit tests require a running Kafka instance. Start the Kafka
server and zookeeper as described in the Installation chapter. You can
run the tests explicitly through Maven with the ``test`` goal by running

.. code:: console

    mvn test

Unless you add ``-DskipTests`` to your Maven command, tests will also be
run with the goals package, install, verify and deploy. You can run all
the tests by running the Maven command from the root directory, or
individual tests by running the Maven command in the corresponding
directory.

FIWARE NGSI-LD Testsuite
------------------------

In order to run the Testsuite you have to have a running instance of
Scorpio, as described in the Start the components chapter, or use the
docker container. You can find the Testsuite here with full instructions
on how to set up and start it. The comprehensive version is this:

- Install npm on your system
- Download the Testsuite from `here `__
- Extract the Testsuite
- Run ``npm install`` in the Testsuite folder to install all dependencies
- Set the 4 required environment variables:

  - TEST_ENDPOINT, which is the broker; the default for Scorpio should be
    http://localhost:9090
  - WEB_APP_PORT, the port for the Testsuite; this should match the port
    used in the entries below, e.g. 4444
  - ACC_ENDPOINT, the endpoint of the Testsuite, e.g. http://localhost:4444
  - NOTIFY_ENDPOINT, the notification endpoint for the tests; it has to end
    with /acc, e.g. http://localhost:4444/acc

- Start Scorpio
- Start the accumulator/notification endpoint by running
  ``node accumulator/accumulator.js &``
- Start the tests with ``npm test``
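Putting the steps above together, a typical local run (using only the example values from the list, which you may need
to adapt to your setup) could look like this:

.. code:: console

    export TEST_ENDPOINT=http://localhost:9090
    export WEB_APP_PORT=4444
    export ACC_ENDPOINT=http://localhost:4444
    export NOTIFY_ENDPOINT=http://localhost:4444/acc

    # start the accumulator/notification endpoint, then run the suite
    node accumulator/accumulator.js &
    npm test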
diff --git a/scorpio-broker/docs/en/source/troubleshooting.rst b/scorpio-broker/docs/en/source/troubleshooting.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7d2e8fc2da7f544b14a28fc1e856483fe58fdae1
--- /dev/null
+++ b/scorpio-broker/docs/en/source/troubleshooting.rst
@@ -0,0 +1,34 @@
*****************
Troubleshooting
*****************

Missing JAXB dependencies
=========================

When starting the eureka-server you may be facing the **java.lang.TypeNotPresentException: Type javax.xml.bind.JAXBContext not present** exception. It is very likely that you are running Java 11 on your machine. Starting from Java 9, the package `javax.xml.bind` has been marked as deprecated and it was finally removed completely in Java 11.

In order to fix this issue and get the eureka-server running, you need to manually add the JAXB Maven dependencies below to `ScorpioBroker/SpringCloudModules/eureka/pom.xml` before starting:

.. code-block:: xml

    <project>
        ...
        <dependencies>
            ...
            <dependency>
                <groupId>com.sun.xml.bind</groupId>
                <artifactId>jaxb-core</artifactId>
                <version>2.3.0.1</version>
            </dependency>
            <dependency>
                <groupId>javax.xml.bind</groupId>
                <artifactId>jaxb-api</artifactId>
                <version>2.3.1</version>
            </dependency>
            <dependency>
                <groupId>com.sun.xml.bind</groupId>
                <artifactId>jaxb-impl</artifactId>
                <version>2.3.1</version>
            </dependency>
            ...
        </dependencies>
        ...
    </project>
\ No newline at end of file
diff --git a/scorpio-broker/docs/en/source/working-with-linked-data.md b/scorpio-broker/docs/en/source/working-with-linked-data.md
new file mode 100644
index 0000000000000000000000000000000000000000..3a24fecae0c5b91d2aef91c0af08d68f78b4828d
--- /dev/null
+++ b/scorpio-broker/docs/en/source/working-with-linked-data.md
@@ -0,0 +1,680 @@
[![FIWARE Core Context Management](https://nexus.lab.fiware.org/repository/raw/public/badges/chapters/core.svg)](https://github.com/FIWARE/catalogue/blob/master/core/README.md)
[![NGSI LD](https://img.shields.io/badge/NGSI-LD-d6604d.svg)](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf)
[![JSON LD](https://img.shields.io/badge/JSON--LD-1.1-f06f38.svg)](https://w3c.github.io/json-ld-syntax/)

This tutorial teaches FIWARE users how to architect and design a system based on **linked data** and to alter linked
data context programmatically. The tutorial extends the knowledge gained from the equivalent
[NGSI-v2 tutorial](accessing-context.md) and enables a user to understand how to write code in an
[NGSI-LD](https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.01.01_60/gs_CIM009v010101p.pdf) capable
[Node.js](https://nodejs.org/) [Express](https://expressjs.com/) application in order to retrieve and alter context
data. This removes the need to use the command-line to invoke cUrl commands.

The tutorial is mainly concerned with discussing code written in Node.js; however, some of the results can be checked by
making [cUrl](https://ec.haxx.se/) requests.
[Postman documentation](https://fiware.github.io/tutorials.Working-with-Linked-Data) for the same commands is also
available.

[![Run in Postman](https://run.pstmn.io/button.svg)](https://github.com/ScorpioBroker/ScorpioBroker/blob/feature-80-temp/docs/en/source/Payloads/FIWARE%20Working%20with%20Linked%20Data.postman_collection.json)
# Working with Linked Data Entities

> - "This is the house that Jack built.
> - This is the malt that lay in the house that Jack built.
> - This is the rat that ate the malt
>   That lay in the house that Jack built.
> - This is the cat
>   That killed the rat that ate the malt
>   That lay in the house that Jack built.
> - This is the dog that chased the cat
>   That killed the rat that ate the malt
>   That lay in the house that Jack built."
>
> ― This Is the House That Jack Built, Traditional English Nursery Rhyme

NGSI-LD is an evolution of NGSI-v2, so it should not be surprising that Smart solutions based on NGSI-LD will need to
cover the same basic scenarios as outlined in the previous NGSI-v2 [tutorial](accessing-context.md) on programmatic data
access.

NGSI-LD Linked data formalizes the structure of context entities to a greater degree, by restricting data
attributes to be defined as either _Property_ attributes or _Relationship_ attributes only. This means that it is
possible to traverse the context data graph with greater certainty when moving from one _Relationship_ to another. All
the context data entities within the system are defined by JSON-LD data models, which are formally defined by
referencing a context file, and this programmatic definition should guarantee that the associated linked entity exists.

Three basic data access scenarios for the supermarket are defined below:

- Reading Data - e.g. Give me all the data for the **Building** entity `urn:ngsi-ld:Building:store001`
- Aggregation - e.g. Combine the **Products** entities sold in **Building** `urn:ngsi-ld:Building:store001` and
  display the goods for sale
- Altering context within the system - e.g. Make a sale of a product:
  - Update the daily sales records by the price of the **Product**
  - Decrement the `numberOfItems` of the **Shelf** entity
  - Create a new Transaction Log record showing the sale has occurred
  - Raise an alert in the warehouse if less than 10 objects remain on sale
  - etc.

Further advanced scenarios will be covered in later tutorials.

## Linked Data Entities within a stock management system

The supermarket data created in the [previous tutorial](relationships-linked-data.md) will be loaded into the context
broker. The existing relationships between the entities are defined as shown below:

![](https://fiware.github.io/tutorials.Working-with-Linked-Data/img/entities-ld.png)

The **Building**, **Product**, **Shelf** and **StockOrder** entities will be used to display data on the frontend of our
demo application.

## The teaching goal of this tutorial

The aim of this tutorial is to improve developer understanding of programmatic access to context data through defining
and discussing a series of generic code examples covering common data access scenarios. For this purpose a simple
Node.js Express application will be created.

The intention here is not to teach users how to write an application in Express - indeed any language could have been
chosen. It is merely to show how **any** sample programming language could be used to alter the context to achieve the
business logic goals.

Obviously, your choice of programming language will depend upon your own business needs - when reading the code below
please keep this in mind and substitute Node.js with your own programming language as appropriate.

# Stock Management Frontend

All the Node.js Express code for the demo can be found within the `ngsi-ld` folder of the GitHub repository:
[Stock Management example](https://github.com/FIWARE/tutorials.Step-by-Step/tree/master/context-provider).
The application runs on the following URLs:

- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001`
- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store002`
- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store003`
- `http://localhost:3000/app/store/urn:ngsi-ld:Building:store004`

> **Tip** Additionally, you can also watch the status of recent requests yourself by following the container logs or
> viewing information on `localhost:3000/app/monitor` on a web browser.
>
> ![FIWARE Monitor](https://fiware.github.io/tutorials.Working-with-Linked-Data/img/monitor.png)

# Architecture

The demo Supermarket application will send and receive NGSI-LD calls to a compliant context broker. Since the NGSI-LD
interface is available on the [Scorpio Broker](https://scorpio.readthedocs.io/en/latest/), the demo application will
make use of this FIWARE component.

Currently, the Scorpio Broker relies on PostgreSQL to persist the context data it holds. To request context data from
external sources, a simple Context Provider NGSI proxy has also been added. To visualize and interact with the context,
we will add a simple Express application.

Therefore, the architecture will consist of three elements:

- The [Scorpio Broker](https://scorpio.readthedocs.io/en/latest/) which will receive requests using
  [NGSI-LD](https://forge.etsi.org/swagger/ui/?url=https://forge.etsi.org/gitlab/NGSI-LD/NGSI-LD/raw/master/spec/updated/full_api.json)
- The underlying PostgreSQL database:
  - Used by the Scorpio Broker to hold context data information such as data entities, subscriptions and
    registrations
- The **Stock Management Frontend** which will:
  - Display store information
  - Show which products can be bought at each store
  - Allow users to "buy" products and reduce the stock count.

Since all interactions between the elements are initiated by HTTP requests, the entities can be containerized and run
from exposed ports.

![](figures/tutorialArchitecture.png)

# Traversing Linked Data Programmatically

Go to `http://localhost:3000/app/store/urn:ngsi-ld:Building:store001` to display and interact with the working
Supermarket data application.

![](https://fiware.github.io/tutorials.Working-with-Linked-Data/img/store.png)

## Reading Linked Data

The code under discussion can be found within the `ngsi-ld/store` controller in the
[Git Repository](https://github.com/FIWARE/tutorials.Step-by-Step/blob/master/context-provider/controllers/ngsi-ld/store.js)

### Initializing the library

As usual, the code for HTTP access can be split out from the business logic of the Supermarket application itself. The
lower-level calls have been placed into a library file, which simplifies the codebase. This needs to be included in the
header of the file as shown. Some constants are also required - for the Supermarket data, the `LinkHeader` is used to
define the location of the data models JSON-LD context as
`https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld`.

```javascript
const ngsiLD = require("../../lib/ngsi-ld");

const LinkHeader =
    '<https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld>; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"';
```

### Retrieve a known Store

This example reads the context data of a given **Store** entity to display the results on screen.
Reading entity data +can be done using the `ngsiLD.readEntity()` method - this will fill out the URL for the GET request and make the +necessary HTTP call in an asynchronous fashion: + +```javascript +async function displayStore(req, res) { + const store = await ngsiLD.readEntity( + req.params.storeId, + { options: "keyValues" }, + ngsiLD.setHeaders(req.session.access_token, LinkHeader) + ); + + return res.render("store", { title: store.name, store }); +} +``` + +The function above also sends some standard HTTP Headers as part of the request - these are defined in the +`setHeaders()` function. + +Within an NGSI-LD-based system, the usual default HTTP headers would include a `Link` header to send the JSON-LD context +and a `Content-Type` header to identify the request as `application/ld+json` (note that every NGSI-LD request is valid +JSON_LD since NGSI-LD is a subset of JSON-LD). Other additional headers such as `X-Auth-Token` can be added to enable +OAuth2 security. + +```javascript +function setHeaders(accessToken, link, contentType) { + const headers = {}; + if (accessToken) { + headers["X-Auth-Token"] = accessToken; + } + if (link) { + headers.Link = link; + } + if (contentType) { + headers["Content-Type"] = contentType || "application/ld+json"; + } + return headers; +} +``` + +Within the `lib/ngsi-ld.js` library file, the `BASE_PATH` defines the location of the Scorpio Broker, reading a +data entity is simply a wrapper around an asynchronous HTTP GET request passing the appropriate headers + +```javascript +const BASE_PATH = process.env.CONTEXT_BROKER || "http://localhost:9090/ngsi-ld/v1"; + +function readEntity(entityId, opts, headers = {}) { + return request({ + qs: opts, + url: BASE_PATH + "/entities/" + entityId, + method: "GET", + headers, + json: true + }); +} +``` + +The equivalent cUrl statement can be seen below: + +```bash +curl -G -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/ld+json' \ +-d 'type=Building' \ +-d 'options=keyValues' +``` + +## Aggregating and Traversing Linked Data + +To display information at the till, it is necessary to discover information about the products found within a Store. +From the Data Entity diagram we can ascertain that: + +- **Building** entities hold related **Shelf** information within the `furniture` _Relationship_ +- **Shelf** entities hold related **Product** information within the `stocks` _Relationship_ +- Products hold `name` and `price` as _Property_ attributes of the **Product** entity itself. + +Therefore the code for the `displayTillInfo()` method will consist of the following steps. + +1. Make a request to the Context Broker to _find shelves within a known store_ +2. Reduce the result to a `id` parameter and make a second request to the Context Broker to _retrieve stocked products + from shelves_ +3. Reduce the result to a `id` parameter and make a third request to the Context Broker to _retrieve product details + for selected shelves_ + +To users familiar with database joins, it may seem strange being forced to making a series of requests like this, +however it is necessary due to scalability issues/concerns in a large distributed setup. Direct join requests are not +possible with NGSI-LD. + +### Find Shelves within a known Store + +To access the `furniture` attribute of a known **Building** entity, a `keyValues` request is made using the `attrs` +parameter. 
+ +```javascript +const building = await ngsiLD.readEntity( + req.params.storeId, + { + type: "Building", + options: "keyValues", + attrs: "furniture" + }, + ngsiLD.setHeaders(req.session.access_token, LinkHeader) +); +``` + +The equivalent cUrl statement can be seen below: + +```bash +curl -G -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store001/' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/ld+json' \ +-d 'type=Building' \ +-d 'options=keyValues' \ +-d 'attrs=furniture' \ +``` + +The response is a JSON Object which includes a `furniture` attribute which can be manipulated further. + +### Retrieve Stocked Products from shelves + +To retrieve a series of **Shelf** entities, the `ngsiLD.listEntities()` function is called and filtered using the `id` +parameter. The `id` is just a comma separated list taken from the request above. + +```javascript +let productsList = await ngsiLD.listEntities( + { + type: "Shelf", + options: "keyValues", + attrs: "stocks,numberOfItems", + id: building.furniture.join(",") + }, + ngsiLD.setHeaders(req.session.access_token, LinkHeader) +); +``` + +`listEntities()` is another function within the `lib/ngsi-ld.js` library file + +```javascript +function listEntities(opts, headers = {}) { + return request({ + qs: opts, + url: BASE_PATH + "/entities", + method: "GET", + headers, + json: true + }); +} +``` + +The equivalent cUrl statement can be seen below: + +```bash +curl -G -X GET 'http://localhost:9090/ngsi-ld/v1/entities/' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/ld+json' \ +-H 'Accept: application/json' \ +-d 'type=Shelf' \ +-d 'options=keyValues' \ +-d 'attrs=stocks,numberOfItems' \ +-d 'id=urn:ngsi-ld:Shelf:unit001,urn:ngsi-ld:Shelf:unit002,urn:ngsi-ld:Shelf:unit003' +``` + +The response is a JSON Array of **Shelf** entities which includes as `stocks` attribute which can be manipulated +further. The code below extracts the IDs for later use. + +```javascript +const stockedProducts = []; + +productsList = _.groupBy(productsList, e => { + return e.stocks; +}); +_.forEach(productsList, (value, key) => { + stockedProducts.push(key); +}); +``` + +### Retrieve Product Details for selected shelves + +To retrieve a series of **Product** entities, the `ngsiLD.listEntities()` function is once again called and filtered +using the `id` parameter. The `id` is just a comma separated list taken from the request above. + +```javascript +let productsInStore = await ngsiLD.listEntities( + { + type: "Product", + options: "keyValues", + attrs: "name,price", + id: stockedProducts.join(",") + }, + headers +); +``` + +The equivalent cUrl statement can be seen below: + +```bash +curl -G -X GET 'http://localhost:9090/ngsi-ld/v1/entities/' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/ld+json' \ +-H 'Accept: application/json' \ +-d 'type=Product' \ +-d 'options=keyValues' \ +-d 'attrs=name,price' \ +-d 'id=urn:ngsi-ld:Product:001,urn:ngsi-ld:Product:003,urn:ngsi-ld:Product:004' +``` + +The response is a JSON Array of **Product** entities which are then displayed on screen. + +## Updating Linked Data + +### Find a shelf stocking a product + +To retrieve a series of **Shelf** entities, the `ngsiLD.listEntities()` function is called. 
It is important to retrieve +the current context before amending it, so the `q` parameter is used to only retrieve a shelf from the correct store +containing the correct product. This request is only possible because the **Shelf** data model has been designed to hold +_relationships_ with both **Building** and **Product**. + +```javascript +const shelf = await ngsiLD.listEntities( + { + type: "Shelf", + options: "keyValues", + attrs: "stocks,numberOfItems", + q: 'numberOfItems>0;locatedIn=="' + req.body.storeId + '";stocks=="' + req.body.productId + '"', + limit: 1 + }, + headers +); +``` + +The equivalent cUrl statement can be seen below: + +```bash +curl -G -X GET 'http://localhost:9090/ngsi-ld/v1/entities/' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/ld+json' \ +-H 'Accept: application/json' \ +-d 'type=Shelf' \ +-d 'options=keyValues' \ +-d 'q=numberOfItems%3E0;locatedIn==%22urn:ngsi-ld:Building:store001%22;stocks==%22urn:ngsi-ld:Product:001%22' +``` + +### Update the state of a shelf + +To update an entity a PATCH request is made using the `id` of the **Shelf** returned in the previous request + +```javascript +const count = shelf[0].numberOfItems - 1; +await ngsiLD.updateAttribute( + shelf[0].id, + { numberOfItems: { type: "Property", value: count } }, + ngsiLD.setHeaders(req.session.access_token, LinkHeader) +); +``` + +The asynchronous PATCH request is found in the `updateAttribute()` function within the `lib/ngsi-ld.js` library file + +```javascript +function updateAttribute(entityId, body, headers = {}) { + return request({ + url: BASE_PATH + "/entities/" + entityId + "/attrs", + method: "PATCH", + body, + headers, + json: true + }); +} +``` + +The equivalent cUrl statement can be seen below: + +```bash +curl -X PATCH 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Shelf:unit001/attrs' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' \ +-H 'Content-Type: application/json' \ +-d '{ "numberOfItems": { "type": "Property", "value": 10 } }' +``` + +## Interoperability using Linked Data + +The introduction of Linked Data concepts to NGSI has so far marginally increased the complexity of all the context +broker requests and we have not yet demonstrated additional benefit. The idea behind linked data is to improve data +interoperability and remove data silos. + +As a demonstration of this, imagine we which to incorporate context data entities from another context provider who is +using a different schema. Rather than using `name`, `category`, `location` etc, our Japanese context provider is using +data attributes based on Kanji characters. + +The core NGSI-LD `@context` defines that `name` = `https://uri.etsi.org/ngsi-ld/name`, similarly we can define `åå‰` = +`https://uri.etsi.org/ngsi-ld/name` and introduce alternate mappings for attribute names and enumerated values. + +Provided that two systems can agree upon a **common** system of unique URIs for data interchange, they are free to +locally re-interpret those values within their own domain. + +### Creating an Entity using an Alternate Schema + +An alternative Japanese JSON-LD `@context` file has been created and published to an external server. The file can be +found here: `https://fiware.github.io/tutorials.Step-by-Step/japanese-context.jsonld`. Alternate data mappings can be +found for all attribute names used within the tutorials. 
+ +> **Note**: For comparision the standard tutorial JSON-LD `@context` file can be found here: +> `https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld` + +#### 1 Request: + +When creating a data entity, short names for all of the URIs mapped in the Japanese JSON-LD `@context` can be used +freely in the payload of the request. + +As can be seen in the example below, attribute names and enumerated values (such as `ビル` = `Building`) can be used +throughout. The NGSI-LD specification mandates that the attributes defined in the NGSI-LD API (i.e. the core `@context`) +are used to define the attributes. Therefore elements of the request such as `id` `type` and `Property` remain +unchanged, although as we will see below this can be circumvented. + +Our Japanese context provider can create a new `Building` using the request below, the `Link` header is pointing to the +Japanese JSON-LD `@context` file which supplies the full URIs for the attribute names and enumerations. + +```bash +curl -L -X POST 'http://localhost:9090/ngsi-ld/v1/entities/' \ +-H 'Content-Type: application/ld+json' \ +--data-raw '{ + "id": "urn:ngsi-ld:Building:store005", + "type": "ビル", + "カテゴリー": {"type": "Property", "value": ["コマーシャル"]}, + "使‰€": { + "type": "Property", + "value": { + "streetAddress": "Eisenacher Straße 98", + "addressRegion": "Berlin", + "addressLocality": "Marzahn", + "postalCode": "12685" + } + }, + "場所": { + "type": "GeoProperty", + "value": {"type": "Point","coordinates": [13.5646, 52.5435]} + }, + "åå‰": {"type": "Property","value": "Yuusui-en"}, + "@context":"https://fiware.github.io/tutorials.Step-by-Step/japanese-context.jsonld" +}' +``` + +Note that in this example the name and address have been supplied as simple strings - JSON-LD does support an `@lang` +definition to allow for internationalization, but this is an advanced topic which will not be discussed here. + +### Reading an Entity using the default schema + +Within the context broker the full URIs are used to refer to the attributes and enumerations. Even though it uses +different attribute short names, the Japanese JSON-LD `@context` file agrees with the standard tutorial context about +the full URIs used for a **Building** entity - effectively it is using the same data model. + +Therefore it is possible to request the new **Building** (created using the Japanese data model) and have it return +using the short names specified in the standard tutorial JSON-LD `@context`, this is done by supplying the `Link` header +is pointing to the tutorial JSON-LD `@context` file. + +#### 2 Request: + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store005' \ +-H 'Content-Type: application/ld+json' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +The response is an ordinary **Building** entity which standard attribute names (such as `name` and `location` and it +also returns the standard enumeration for **Building** `category`. 
+ +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/tutorials-context.jsonld", + "id": "urn:ngsi-ld:Building:store005", + "type": "Building", + "address": { + "type": "Property", + "value": { + "streetAddress": "Eisenacher Straße 98", + "addressRegion": "Berlin", + "addressLocality": "Marzahn", + "postalCode": "12685" + } + }, + "location": { + "type": "GeoProperty", + "value": { "type": "Point", "coordinates": [13.5646, 52.5435] } + }, + "name": { "type": "Property", "value": "Yuusui-en" }, + "category": { "type": "Property", "value": "commercial" } +} +``` + +This means that our Supermarket application is able to display the new building without any modification to the +underlying codebase. The data is interoperable. + +Goto `http://localhost:3000/app/store/urn:ngsi-ld:Building:store005` to show that the new **Building** can be displayed: + +![](https://fiware.github.io/tutorials.Working-with-Linked-Data/img/store5.png) + +### Reading an Entity using an alternate schema + +With one exception, there is no hierarchy defined within NGSI-LD `@context` files - any defined `@context` is therefore +possible to read any of the existing **Building** entities and apply the Japanese `@context`. The `@context` to used is +supplied in the `Link` header. + +#### 3 Request: + +```bash +curl -L -X GET 'http://localhost:9090/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store003' \ +-H 'Content-Type: application/ld+json' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +The response is mixed - it uses attribute names and enumerations defined in `japanese-context.jsonld` with some +exceptions. NGSI-LD **is not** JSON-LD, in that the core context is always applied after the contexts received in the +`Link` header. Since `name` and `location` are reserved attribute names, they are supplied using the default core +context. + +```json +{ + "@context": "https://fiware.github.io/tutorials.Step-by-Step/japanese-context.jsonld", + "id": "urn:ngsi-ld:Building:store003", + "type": "ビル", + "å®¶å…·": { + "type": "Relationship", + "object": ["urn:ngsi-ld:Shelf:unit006", "urn:ngsi-ld:Shelf:unit007", "urn:ngsi-ld:Shelf:unit008"] + }, + "使‰€": { + "type": "Property", + "value": { + "streetAddress": "Mühlenstrasse 10", + "addressRegion": "Berlin", + "addressLocality": "Friedrichshain", + "postalCode": "10243" + }, + "検証済ã¿": { "type": "Property", "value": false } + }, + "name": { "type": "Property", "value": "East Side Galleria" }, + "カテゴリー": { "type": "Property", "value": "コマーシャル" }, + "location": { + "type": "GeoProperty", + "value": { "type": "Point", "coordinates": [13.4447, 52.5031] } + } +} +``` + +### Applying Entity Expansion/Compaction + +The Within JSON-LD there is a standard mechanism for applying and altering local attribute names. The response from the +context broker will always be valid NGSI-LD. NGSI-LD is just a structured subset of JSON-LD, so further changes can be +made to use the data received as JSON. + +If we need to overide the core NGSI-LD context, we can apply an additional expansion/compaction operation over the +response to retrive the data in a fully converted fashion for local use. + +JSON-LD libraries already exist to do this work. 
+ +```javascript +const coreContext = require("./jsonld-context/ngsi-ld.json"); +const japaneseContext = require("./jsonld-context/japanese.json"); + +function translateRequest(req, res) { + request({ + url: BASE_PATH + req.path, + method: req.method, + headers: req.headers, + qs: req.query, + json: true + }) + .then(async function(cbResponse) { + cbResponse["@context"] = coreContext; + const expanded = await jsonld.expand(cbResponse); + const compacted = await jsonld.compact(expanded, japaneseContext); + delete compacted["@context"]; + return res.send(compacted); + }) + .catch(function(err) { + return res.send(err); + }); +} +``` + +#### 4 Request: + +A `/japanese` endpoint has been created which forwards a request to the context broker and then applies an +expansion/compaction operation. + +```bash +curl -L -X GET 'http://localhost:3000/japanese/ngsi-ld/v1/entities/urn:ngsi-ld:Building:store005' \ +-H 'Accept: application/json' \ +-H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' +``` + +#### Response: + +The response after the expansion/compaction operation is data which now uses all of the preferred attribute names - this +is **no longer** valid NGSI-LD, but would be of use if the receiving system requests data in this format. + +Note that the reverse expansion/compaction operation could be used to convert this JSON back into a valid NGSI-LD +payload before sending data to the context broker. + +```json +{ + "識別å­": "urn:ngsi-ld:Building:store005", + "タイプ": "ビル", + "カテゴリー": { "タイプ": "プロパティ", "値": "コマーシャル" }, + "使‰€": { + "タイプ": "プロパティ", + "値": { + "addressLocality": "Marzahn", + "addressRegion": "Berlin", + "postalCode": "12685", + "streetAddress": "Eisenacher Straße 98" + } + }, + "場所": { + "タイプ": "ジオプロパティ", + "値": { "タイプ": "Point", "座標": [13.5646, 52.5435] } + }, + "åå‰": { "タイプ": "プロパティ", "値": "Yuusui-en" } +} +``` + +#### Video: JSON-LD Compaction & Expansion + +[![](https://fiware.github.io/tutorials.Step-by-Step/img/video-logo.png)](https://www.youtube.com/watch?v=Tm3fD89dqRE "JSON-LD Compaction & Expansion") + +Click on the image above to watch a video JSON-LD expansion and compaction with reference to the `@context` and +interoperability. diff --git a/scorpio-broker/docs/ja/source/API_walkthrough.rst b/scorpio-broker/docs/ja/source/API_walkthrough.rst new file mode 100644 index 0000000000000000000000000000000000000000..3756509ca6b85da5e9087eced83e7b18cbd76cb0 --- /dev/null +++ b/scorpio-broker/docs/ja/source/API_walkthrough.rst @@ -0,0 +1,1461 @@ +****************** +イントロダクション +****************** + +ã“ã®ã‚¦ã‚©ãƒ¼ã‚¯ã‚¹ãƒ«ãƒ¼ã§ã¯ã€èª­è€…㌠NGSI-LD 全般ã€ç‰¹ã« Scorpio Broker ã«ç²¾é€šã—ã€ãã®éŽç¨‹ã‚’楽ã—ã‚“ã§ã„ãŸã ã‘るよã†ã€å®Ÿè·µçš„㪠+アプローãƒã‚’採用ã—ã¦ã„ã¾ã™:)。 + +ウォークスルー㯠NGSI-LD 仕様ã«åŸºã¥ã„ã¦ãŠã‚Šã€ã“ã“ +[https://www.etsi.org/deliver/etsi_gs/CIM/001_099/009/01.02.02_60/gs_CIM009v010202p.pdf] ã«ã‚りã¾ã™ã€‚-> ã¾ã‚‚ãªã +gs_CIM009v010301p.pdf ã«ãªã‚Šã¾ã™... 
NGSI-LD ã®å®Ÿè£…ã«é–¢ã™ã‚‹æ³¨æ„事項もã”覧ãã ã•ã„。--> 利用å¯èƒ½ã«ãªã£ãŸã‚‰ NGSI-LD +ã«æ…£ã‚Œã‚‹ãŸã‚ã«ã€NGSI-LD 入門書 [https://www.etsi.org/deliver/etsi_gr/CIM/001_099/008/01.01.01_60/gr_CIM008v010101p.pdf] +ã‚‚ã”覧ãã ã•ã„。ã“れã¯é–‹ç™ºè€…を対象ã¨ã—ã¦ã„ã¾ã™ã€‚ + +メイン セクションã¯ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆç®¡ç†ã«ã¤ã„ã¦ã§ã™ã€‚コンテキスト管ç†ã®åŸºæœ¬çš„ãªã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆ ブローカー機能 (è»Šã®æ¸©åº¦ãªã©ã® +エンティティã«é–¢ã™ã‚‹æƒ…å ±) ã«ã¤ã„ã¦èª¬æ˜Žã—ã¾ã™ã€‚コンテキスト ã‚½ãƒ¼ã‚¹ç®¡ç† (エンティティ自体ã«é–¢ã™ã‚‹æƒ…å ±ã§ã¯ãªã〠+分散システム ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ—ã§æƒ…報をæä¾›ã§ãるソースã«é–¢ã™ã‚‹æƒ…å ±) ã‚‚ã€ã“ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã®ä¸€éƒ¨ã¨ã—ã¦èª¬æ˜Žã—ã¦ã„ã¾ã™ã€‚ + +é–‹å§‹ã™ã‚‹å‰ã«ã€NGSI-LD モデルã®åŸºç¤Žã¨ãªã‚‹ç†è«–的概念をç†è§£ã™ã‚‹ã“ã¨ã‚’ãŠå‹§ã‚ã—ã¾ã™ã€‚エンティティã€ãƒ—ロパティ〠+リレーションシップãªã©ã€‚ã“れã«é–¢ã™ã‚‹ FIWARE ドキュメントã€ãŸã¨ãˆã°ã“ã®å…¬é–‹ãƒ—レゼンテーションをã”覧ãã ã•ã„。 +[... é©åˆ‡ãªãƒ—レゼンテーションを見ã¤ã‘ã‚‹] + +ãƒãƒ¥ãƒ¼ãƒˆãƒªã‚¢ãƒ«ç”¨ã® ScorpioBroker ã®èµ·å‹• +####################################### + +ブローカーを起動ã™ã‚‹ã«ã¯ã€docker-compose を使用ã™ã‚‹ã“ã¨ã‚’ãŠå‹§ã‚ã—ã¾ã™ã€‚Scorpio ã®github リãƒã‚¸ãƒˆãƒªã‹ã‚‰ docker-compose +ファイルをå–å¾—ã—ã¾ã™ã€‚ +:: + + curl https://raw.githubusercontent.com/ScorpioBroker/ScorpioBroker/development/docker-compose-aaio.yml + + +ãã—ã¦ã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã§ã‚³ãƒ³ãƒ†ãƒŠã‚’èµ·å‹•ã—ã¾ã™ +:: + + sudo docker-compose -f docker-compose-aaio.yml up + +Docker ãªã—ã§ãƒ–ローカーを起動ã™ã‚‹ã“ã¨ã‚‚ã§ãã¾ã™ã€‚è©³ç´°ãªæ‰‹é †ã«ã¤ã„ã¦ã¯ã€ +readme https://github.com/ScorpioBroker/ScorpioBroker/blob/development/README.md ã‚’å‚ç…§ã—ã¦ãã ã•ã„。 + +ブローカーã¸ã®ã‚³ãƒžãƒ³ãƒ‰ã®ç™ºè¡Œ +############################ + +ブローカーã«ãƒªã‚¯ã‚¨ã‚¹ãƒˆã‚’発行ã™ã‚‹ã«ã¯ã€curl コマンドラインツールを使用ã§ãã¾ã™ã€‚curl ã‚’é¸æŠžã—ãŸã®ã¯ã€ã»ã©ã‚“ã©ã® +GNU/Linux システムã§åˆ©ç”¨å¯èƒ½ã§ã€ã“ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã«ç°¡å˜ã«ã‚³ãƒ”ーã—ã¦è²¼ã‚Šä»˜ã‘ã‚‹ã“ã¨ãŒã§ãる例をå«ã‚ã‚‹ã®ãŒç°¡å˜ã ã‹ã‚‰ã§ã™ã€‚ +ã‚‚ã¡ã‚ã‚“ã€ã“れを使用ã™ã‚‹ã“ã¨ã¯å¿…é ˆã§ã¯ã‚りã¾ã›ã‚“。代ã‚りã«ä»»æ„ã® REST クライアントツール (RESTClient ãªã©) +を使用ã§ãã¾ã™ã€‚実際ã«ã¯ã€ã‚¢ãƒ—リケーション㮠REST クライアント部分を実装ã™ã‚‹ãƒ—ログラミング言語ライブラリを使用ã—㦠+ScorpioBroker ã¨å¯¾è©±ã™ã‚‹ã“ã¨ã«ãªã‚Šã¾ã™ã€‚ + +ã“ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã®ã™ã¹ã¦ã® curl ã®ä¾‹ã®åŸºæœ¬çš„ãªãƒ‘ã‚¿ãƒ¼ãƒ³ã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™: + +POST ã®å ´åˆ: +curl localhost:9090/ngsi-ld/v1/ -s -S [headers]' -d @- < -s -S [headers] -X PUT -d @- < -s -S [headers] -X PATCH -d @- < -s -S [headers] +DELETE ã®å ´åˆ: +curl localhost:9090/ngsi-ld/v1/ -s -S [headers] -X DELETE +[headers] ã«é–¢ã—ã¦ã¯æ¬¡ã®ã‚‚ã®ã‚’å«ã‚ã¾ã™: + +Accept header ã§ã¯ã€ãƒ¬ã‚¹ãƒãƒ³ã‚¹ã‚’å—ä¿¡ã™ã‚‹ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰å½¢å¼ã‚’指定ã—ã¾ã™ã€‚JSON ã¾ãŸã¯ JSON-LD ã‚’æ˜Žç¤ºçš„ã«æŒ‡å®šã™ã‚‹å¿…è¦ãŒ +ã‚りã¾ã™ã€‚curl ... -H 'Accept: application/json' ... ã¾ãŸã¯ curl ... -H 'Accept: application/ld-json' +ã“れã¯ã€JSON-LD @context ã‚’ link header ã§å—ä¿¡ã™ã‚‹ã‹ã€ãƒ¬ã‚¹ãƒãƒ³ã‚¹ã®ãƒœãƒ‡ã‚£ã§å—ä¿¡ã™ã‚‹ã‹ã«ã‚ˆã£ã¦ç•°ãªã‚Šã¾ã™ +(JSON-LD 㨠@context ã®ä½¿ç”¨ã«ã¤ã„ã¦ã¯ã€æ¬¡ã®ã‚»ã‚¯ã‚·ãƒ§ãƒ³ã§èª¬æ˜Žã—ã¾ã™) + +リクエスト (ãŸã¨ãˆã° POST, PUT ã¾ãŸã¯ PATCH) ã§ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ä½¿ç”¨ã™ã‚‹å ´åˆã¯ã€å½¢å¼ (JSON ã¾ãŸã¯ JSON-LD) ã‚’ +指定ã™ã‚‹ãŸã‚ã«Context-Type HTTP header を指定ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ +curl ... -H 'Content-Type: application/json' ... ã¾ãŸã¯ -H 'Content-Type: application/ld+json' + +JSON-LD @context ãŒãƒªã‚¯ã‚¨ã‚¹ãƒˆãƒœãƒ‡ã‚£ã®ä¸€éƒ¨ã¨ã—ã¦æä¾›ã•れã¦ã„ãªã„å ´åˆã¯ã€link header ã¨ã—ã¦æä¾›ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ (例: +curl ... 
-H 'Link: ; +rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json" ã“ã“ã§ã€@context ã¯æœ€åˆã® URI +ã‹ã‚‰å–å¾—å¯èƒ½ã§ã‚ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ã¤ã¾ã‚Šã€ã“ã®ä¾‹ã§ã¯: https://uri.etsi.org/ngsi-ld/primer/store-context.jsonld + +ã„ãã¤ã‹ã®è¿½åŠ ã®ã‚³ãƒ¡ãƒ³ãƒˆ: + +ã»ã¨ã‚“ã©ã®å ´åˆã€è¤‡æ•°è¡Œã®ã‚·ã‚§ãƒ«ã‚³ãƒžãƒ³ãƒ‰ã‚’使用ã—㦠curl ã¸ã®å…¥åŠ›ã‚’æä¾›ã—ã€EOF を使用ã—ã¦è¤‡æ•°è¡Œã®ãƒ–ロック +(ヒアドキュメント) ã®é–‹å§‹ã¨çµ‚了をマークã—ã¾ã™ã€‚å ´åˆã«ã‚ˆã£ã¦ã¯ (GET ãŠã‚ˆã³ DELETE)ã€ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ãŒä½¿ç”¨ã•れãªã„ãŸã‚〠+-d @- ã‚’çœç•¥ã—ã¾ã™ã€‚ + +例ã§ã¯ã€ãƒ–ローカーãŒãƒãƒ¼ãƒˆ 9090 ã§ãƒªãƒƒã‚¹ãƒ³ã—ã¦ã„ã‚‹ã¨æƒ³å®šã—ã¦ã„ã¾ã™ã€‚別ã®ãƒãƒ¼ãƒˆã‚’使用ã—ã¦ã„ã‚‹å ´åˆã¯ã€curl +コマンドラインã§ã“れを調整ã—ã¦ãã ã•ã„。 + +レスãƒãƒ³ã‚¹ã§ JSON ã‚’ãれã„ã«å‡ºåŠ›ã™ã‚‹ãŸã‚ã«ã€msjon.tool ã§ Python を使用ã§ãã¾ã™ +(ãƒãƒ¥ãƒ¼ãƒˆãƒªã‚¢ãƒ«ã¨ã¨ã‚‚ã«ä¾‹ã§ã¯ã“ã®ã‚¹ã‚¿ã‚¤ãƒ«ã‚’使用ã—ã¦ã„ã¾ã™): + +(curl ... | python -mjson.tool) <; rel="http://www.w3.org/ns/json-ld#context"; +type="application/ld+json" + +ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆç®¡ç† +################ + +@context ã®ä½¿ç”¨æ³•を示ã™ãŸã‚ã«ã€ã“ã®ãƒãƒ¥ãƒ¼ãƒˆãƒªã‚¢ãƒ«ã®ã»ã¨ã‚“ã©ã®ä¾‹ã¯ã€ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã®ãƒœãƒ‡ã‚£ã« @context エントリをæŒã¤ +application/ld+json ã¨ã—ã¦å®Ÿè¡Œã•れã¾ã™ã€‚ã“ã®ã‚»ã‚¯ã‚·ãƒ§ãƒ³ã®æœ€å¾Œã§ã¯ã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆç®¡ç†æ“作㧠Scorpio Broker +を使用ã—ã¦ã‚¢ãƒ—リケーション (コンテキストプロデューサーã¨ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ã®ä¸¡æ–¹) を作æˆã™ã‚‹ãŸã‚ã®åŸºæœ¬çš„ãªçŸ¥è­˜ã‚’ç¿’å¾—ã—ã¾ã™ã€‚ + +****************** +エンティティã®ä½œæˆ +****************** + +æ–°ãŸãªã‚¹ã‚¿ãƒ¼ãƒˆã‚’想定ã™ã‚‹ã¨ã€ç©ºã® ScorpioBroker ãŒã‚りã¾ã™ã€‚ã¾ãšã€``house2:smartrooms:room1`` を作æˆã—ã¾ã™ã€‚ +エンティティã®ä½œæˆæ™‚ã«ã€æ¸©åº¦ãŒ23℃ã§ã‚りã€``smartcity:houses:house2`` ã®ä¸€éƒ¨ã§ã‚ã‚‹ã¨ä»®å®šã—ã¾ã—ょã†ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities -s -S -H 'Content-Type: application/ld+json' -d @- <; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +ãƒªãƒ—ãƒ©ã‚¤ã¯æ¬¡ã®ã‚ˆã†ã«ãªã‚Šã¾ã™ã€‚ +:: + + { + "id": "smartcity:houses:house2", + "type": "House", + "hasRoom": [{ + "type": "Relationship", + "object": "house2:smartrooms:room1", + "datasetId": "somethingunique1" + }, + { + "type": "Relationship", + "object": "house2:smartrooms:room2", + "datasetId": "somethingunique2" + }], + "location": { + "type": "GeoProperty", + "value": { + "type": "Polygon", + "coordinates": [[[-8.5, 41.2], [-8.5000001, 41.2], [-8.5000001, 41.2000001], [-8.5, 41.2000001], [-8.5, 41.2]]] + } + }, + "entrance": { + "type": "GeoProperty", + "value": { + "type": "Point", + "coordinates": [-8.50000005, 41.2] + } + }, + "@context": [ "https://pastebin.com/raw/Mgxv2ykn" ] + } + +コアコンテキストã¯ç‹¬è‡ªã® @context ã§æä¾›ã™ã‚‹ãŸã‚ã€çµæžœã«ã¯è¿½åŠ ã•れã¾ã›ã‚“。ã“ã“ã‹ã‚‰ã¯ã€ã‚«ã‚¹ã‚¿ãƒ  @context を使用ã—ã¦ã€ +ã™ã¹ã¦ã®ãƒªã‚¯ã‚¨ã‚¹ãƒˆã§çŸ­ã„åå‰ã‚’使用ã§ãるよã†ã«ã—ã¾ã™ã€‚ + +attrs パラメータを使用ã—ã¦ã€æŒ‡å®šã•れãŸå˜ä¸€ã®å±žæ€§ã‚’æŒã¤ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’リクエストã™ã‚‹ã“ã¨ã‚‚ã§ãã¾ã™ã€‚ãŸã¨ãˆã°ã€å ´æ‰€ +(location) ã®ã¿ã‚’å–å¾—ã™ã‚‹ã«ã¯ã€æ¬¡ã®ã‚ˆã†ã«ã—ã¾ã™: +:: + + curl localhost:9090/ngsi-ld/v1/entities/smartcity%3Ahouses%3Ahouse2/?attrs=location -s -S -H 'Accept: application/ld+json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +レスãƒãƒ³ã‚¹: +:: + + { + "id": "smartcity:houses:house2", + "type": "House", + "location": { + "type": "GeoProperty", + "value": { + "type": "Polygon", + "coordinates": [[[-8.5, 41.2], [-8.5000001, 41.2], [-8.5000001, 41.2000001], [-8.5, 41.2000001], [-8.5, 41.2]]] + } + }, + "@context": [ "https://pastebin.com/raw/Mgxv2ykn" ] + } + +クエリ +##### + +情報をå–å¾—ã™ã‚‹2ç•ªç›®ã®æ–¹æ³•ã¯ã€NGSI-LD クエリã§ã™ã€‚ ã“ã®ä¾‹ã§ã¯ã€æœ€åˆã«åˆ¥ã®å®¶ (house) ã«å±žã™ã‚‹æ–°ã—ã„部屋 (room) +を追加ã—ã¾ã™ã€‚ +:: + + 
curl localhost:9090/ngsi-ld/v1/entities -s -S -H 'Content-Type: application/ld+json' -d @- <; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +ã“ã®ãƒªã‚¯ã‚¨ã‚¹ãƒˆã«ã¯ Accept header application/json ãŒã‚ã‚‹ã“ã¨ã«æ³¨æ„ã—ã¦ãã ã•ã„。ã¤ã¾ã‚Šã€@context ã¸ã®ãƒªãƒ³ã‚¯ã¯ +link header ã§è¿”ã•れã¾ã™ã€‚çµæžœã¯ã€ +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": { + "value": 23, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor0815" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property" + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor4711" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + }, + { + "id": "house99:smartrooms:room42", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house99:sensor36" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house99" + } + } + ] + +フィルタリング +############# + +NGSI-LD ã¯ã€ã‚¯ã‚¨ãƒªçµæžœ ãŠã‚ˆã³ã‚µãƒ–スクリプションã®ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã‹ã‚‰ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’フィルタリングã™ã‚‹ãŸã‚ã® +多ãã®æ–¹æ³•ã‚’æä¾›ã—ã¾ã™ã€‚ç§ãŸã¡ã¯ ``smartcity:houses:house2`` ã«ã®ã¿èˆˆå‘³ãŒã‚ã‚‹ã®ã§ã€Relatioship isPartOf ã§ 'q' +フィルターを使用ã—ã¦ã„ã¾ã™ã€‚(URL エンコーディング㧠``smartcity:houses:house2`` 㯠%22smartcity%3Ahouses%3Ahouse2%22 +ã«ãªã‚Šã¾ã™) +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room\&q=isPartOf==%22smartcity%3Ahouses%3Ahouse2%22 -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +çµæžœã¯æ¬¡ã®ã‚ˆã†ã«ãªã‚Šã¾ã™ã€‚ +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": { + "value": 23, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor0815" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property" + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor4711" + } + }, + "isPartOf": { + "type": "Relationship", + "object": "smartcity:houses:house2" + } + } + ] + +åŒã˜çµæžœã‚’å¾—ã‚‹åˆ¥ã®æ–¹æ³•ã¯ã€idPattern パラメータを使用ã™ã‚‹ã“ã¨ã§ã™ã€‚ã“れã«ã‚ˆã‚Šã€æ­£è¦è¡¨ç¾ã‚’使用ã§ãã¾ã™ã€‚ã“ã®å ´åˆã€éƒ¨å±‹ +(rooms) ã® IDs を構造化ã—ãŸãŸã‚ã€ã“れãŒå¯èƒ½ã§ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room\&idPattern=house2%3Asmartrooms%3Aroom.%2A -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + (house2%3Asmartrooms%3Aroom.%2A == house2:smartrooms:room.*) + +å±žæ€§ã‚’åˆ¶é™ +########## + +ã•らã«ã€æ¸©åº¦ (temperature) ã®ã¿ã‚’与ãˆã‚‹ã‚ˆã†ã«çµæžœã‚’制é™ã—ãŸã„ã¨æ€ã„ã¾ã™ã€‚ã“れã¯ã€attrs パラメータを使用ã—ã¦å®Ÿè¡Œ +ã•れã¾ã™ã€‚Attrs ã¯ã‚³ãƒ³ãƒžåŒºåˆ‡ã‚Šã®ãƒªã‚¹ãƒˆã‚’å–りã¾ã™ã€‚ç§ãŸã¡ã®å ´åˆã€ã‚¨ãƒ³ãƒˆãƒªã¯1ã¤ã ã‘ãªã®ã§ã€æ¬¡ã®ã‚ˆã†ã«ãªã‚Šã¾ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room&q=isPartOf==%22smartcity%3Ahouses%3Ahouse2%22\&attrs=temperature -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; 
type="application/ld+json"' + +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": { + "value": 23, + "unitCode": "CEL", + "type": "Property", + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor0815" + } + } + + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": { + "value": 21, + "unitCode": "CEL", + "type": "Property" + "providedBy": { + "type": "Relationship", + "object": "smartbuilding:house2:sensor4711" + } + } + } + ] + +KeyValues ã®çµæžœ +################ + +ã“ã“ã§ã€å®Ÿéš›ã«ã¯æ¸©åº¦ (temperature) ã®å€¤ã®ã¿ã«é–¢å¿ƒãŒã‚りã€ãƒ¡ã‚¿æƒ…å ±ã¯æ°—ã«ã—ãªã„ãŸã‚ã€ãƒªã‚¯ã‚¨ã‚¹ãƒˆã®ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’ã•ら㫠+制é™ã—ãŸã„ã¨ã—ã¾ã™ã€‚ã“れã¯ã€keyValues オプションを使用ã—ã¦å®Ÿè¡Œã§ãã¾ã™ã€‚KeyValues ã¯ã€æœ€ä¸Šä½ã®å±žæ€§ã¨ãれãžã‚Œã®å€¤ +ã¾ãŸã¯ã‚ªãƒ–ジェクトã®ã¿ã‚’æä¾›ã™ã‚‹ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®åœ§ç¸®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã‚’è¿”ã—ã¾ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities/?type=Room\&q=isPartOf==%22smartcity%3Ahouses%3Ahouse2%22\&attrs=temperature\&options=keyValues -s -S -H 'Accept: application/json' -H 'Link: ; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +レスãƒãƒ³ã‚¹: +:: + + [ + { + "id": "house2:smartrooms:room1", + "type": "Room", + "temperature": 23 + }, + { + "id": "house2:smartrooms:room2", + "type": "Room", + "temperature": 21 + } + ] + +**************************************** +ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®æ›´æ–°ã¨ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã¸ã®è¿½åŠ  +**************************************** + +NGSI-LD を使用ã™ã‚‹ã¨ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’æ›´æ–° (ç¾åœ¨ã®ã‚¨ãƒ³ãƒˆãƒªã‚’上書ã) ã™ã‚‹ã ã‘ã§ãªãã€æ–°ã—ã„属性を追加ã™ã‚‹ã“ã¨ã‚‚ã§ãã¾ã™ã€‚ +ã•らã«ã€ã‚‚ã¡ã‚ん特定ã®å±žæ€§ã‚’æ›´æ–°ã™ã‚‹ã“ã¨ã‚‚ã§ãã¾ã™ã€‚``house2:smartrooms:room1`` ã®æ¸©åº¦ (temperature) +ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ—ロデューサーã®å½¹å‰²ã‚’æžœãŸã—ã¦ã€5ã¤ã®ã‚·ãƒŠãƒªã‚ªã‚’ã‚«ãƒãƒ¼ã—ã¾ã™ã€‚ + +1. エンティティ全体を更新ã—ã¦ã€æ–°ã—ã„値をプッシュã—ã¾ã™ã€‚ +2. 部屋 (room) ã‹ã‚‰ã®æ¹¿åº¦ (humidity) ã‚’æä¾›ã™ã‚‹æ–°ã—ã„プロパティを追加ã—ã¾ã™ã€‚ +3. 温度 (temperature) ã®å€¤ã‚’éƒ¨åˆ†çš„ã«æ›´æ–°ã—ã¾ã™ã€‚ +4. æ–°ã—ã„複数値エントリを温度 (temperature) ã«è¿½åŠ ã—ã¦ã‚±ãƒ«ãƒ“ãƒ³åº¦ã§æƒ…報をæä¾›ã—ã¾ã™ã€‚ +5. 
温度 (temperature) ã¨è¯æ° (Fahrenheit) ã®ç‰¹å®šã®è¤‡æ•°å€¤ã‚¨ãƒ³ãƒˆãƒªã‚’æ›´æ–°ã—ã¾ã™ã€‚ + +ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®æ›´æ–° +################## + +基本的ã«ã€2ã¤ã®ä¾‹å¤–を除ã„ã¦ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®ã™ã¹ã¦ã®éƒ¨åˆ†ã‚’æ›´æ–°ã§ãã¾ã™ã€‚type 㨠id ã¯ä¸å¤‰ã§ã™ã€‚NGSI-LD ã®æ›´æ–°ã«ã‚ˆã‚Šã€ +既存ã®ã‚¨ãƒ³ãƒˆãƒªãŒä¸Šæ›¸ãã•れã¾ã™ã€‚ã“れã¯ã€ç¾åœ¨å­˜åœ¨ã™ã‚‹å±žæ€§ã‚’å«ã¾ãªã„ペイロードã§ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’æ›´æ–°ã™ã‚‹ã¨ã€ +ãã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãŒå‰Šé™¤ã•れるã“ã¨ã‚’æ„味ã—ã¾ã™ã€‚room1 ã‚’æ›´æ–°ã™ã‚‹ã«ã¯ã€æ¬¡ã®ã‚ˆã†ãª HTTP POST を実行ã—ã¾ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities/house2%3Asmartrooms%3Aroom1 -s -S -H 'Content-Type: application/json' -H 'Link: https://pastebin.com/raw/Mgxv2ykn' -d @- </attrs/ +ã® POST ã«ã‚ˆã£ã¦è¡Œã‚れã¾ã™ã€‚温度 (temperature) ã‚’æ›´æ–°ã™ã‚‹ãŸã‚ã«ã€æ¬¡ã®ã‚ˆã†ãª POST を実行ã—ã¾ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities/house2%3Asmartrooms%3Aroom1/attrs/temperature -s -S -H 'Content-Type: application/json' -H 'Link: https://pastebin.com/raw/Mgxv2ykn' -d @- </attrs/ ã§ HTTP PATCH +コマンドを実行ã—ã¾ã™ã€‚デフォルト㧠NGSI-LD ã«è¿½åŠ ã™ã‚‹ã¨ã€æ—¢å­˜ã®ã‚¨ãƒ³ãƒˆãƒªãŒä¸Šæ›¸ãã•れã¾ã™ã€‚ã“ã‚ŒãŒæœ›ã¾ã—ããªã„å ´åˆã¯ã€ +/entities//attrs?options=noOverwrite ã®ã‚ˆã†ã« URL ã« noOverwrite を使用ã—ã¦ã‚ªãƒ—ション パラメータを +追加ã§ãã¾ã™ã€‚ã“ã“ã§ã€room1 ã®æ¹¿åº¦ (humidity) ã®ã‚¨ãƒ³ãƒˆãƒªã‚’追加ã™ã‚‹å ´åˆã¯ã€æ¬¡ã®ã‚ˆã†ã« HTTP PATCH を実行ã—ã¾ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/entities/house2%3Asmartrooms%3Aroom1/attrs -s -S -X PATCH -H 'Content-Type: application/json' -H 'Link: https://pastebin.com/raw/Mgxv2ykn' -d @- <:@]:[]/[[/]...]。 +ã—ãŸãŒã£ã¦ã€ã‚µãƒ–スクリプションã¯é€šå¸¸æ¬¡ã®ã‚ˆã†ã«ãªã‚Šã¾ã™ã€‚ +:: + + curl localhost:9090/ngsi-ld/v1/subscriptions -s -S -H 'Content-Type: application/ld+json' -d @- <; rel="http://www.w3.org/ns/json-ld#context"; type="application/ld+json"' + +オリジナルã®ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã¨ã€ã‚¿ã‚¤ãƒ—ルーム (type Room) ã«ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã•れã¦ã„ã‚‹ã™ã¹ã¦ã®ã‚‚ã®ãŒè¿”ã•れã¾ã™ã€‚ + +通常ã®ã‚¯ã‚¨ãƒªã¨ã‚µãƒ–スクリプションã§ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ¬ã‚¸ã‚¹ãƒˆãƒªã®ä½¿ç”¨ +################################################################ + +コンテキストレジストリエントリã«ã¯ã€é€šå¸¸ã®ã‚¯ã‚¨ãƒªã¾ãŸã¯ã‚µãƒ–スクリプション㌠Scorpio ã«åˆ°ç€ã—ãŸã¨ãã«è€ƒæ…®ã•れる複数㮠+エントリをå«ã‚ã‚‹ã“ã¨ãŒã§ãã¾ã™ã€‚ã”覧ã®ã¨ãŠã‚Šã€ã‚µãƒ–スクリプションã«ã‚ã‚‹ã‚‚ã®ã¨åŒæ§˜ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚¨ãƒ³ãƒˆãƒªãŒã‚りã¾ã™ã€‚ +ã“ã‚Œã¯æœ€åˆã«è€ƒæ…®ã™ã¹ãã“ã¨ã§ã™ã€‚タイプ (type) をレジストレーションã™ã‚‹ã¨ã€Scorpio ã¯ãã®ã‚¿ã‚¤ãƒ—ã«ä¸€è‡´ã™ã‚‹ +リクエストã®ã¿ã‚’転é€ã—ã¾ã™ã€‚åŒæ§˜ã«ã€å ´æ‰€ (location) ã¯ã€åœ°ç†ã‚¯ã‚¨ãƒªéƒ¨åˆ†ã‚’å«ã‚€ã‚¯ã‚¨ãƒªã‚’転é€ã™ã‚‹ã‹ã©ã†ã‹ã‚’決定ã™ã‚‹ãŸã‚ã« +使用ã•れã¾ã™ã€‚やりã™ãŽã¦ã¯ã„ã‘ã¾ã›ã‚“ãŒã€ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã§è©³ç´°ã‚’æä¾›ã™ã‚Œã°ã™ã‚‹ã»ã©ã€ã‚·ã‚¹ãƒ†ãƒ ã¯ãƒªã‚¯ã‚¨ã‚¹ãƒˆã®è»¢é€å…ˆã® +コンテキストソースをより効率的ã«åˆ¤æ–­ã§ãるよã†ã«ãªã‚Šã¾ã™ã€‚以下ã«ã€ã‚ˆã‚Šå¤šãã®ãƒ—ロパティãŒè¨­å®šã•れãŸä¾‹ã‚’示ã—ã¾ã™ã€‚ +:: + + { + "id": "urn:ngsi-ld:ContextSourceRegistration:csr1a3459", + "type": "ContextSourceRegistration", + "name": "NameExample", + "description": "DescriptionExample", + "information": [ + { + "entities": [ + { + "type": "Vehicle" + } + ], + "properties": [ + "brandName", + "speed" + ], + "relationships": [ + "isParked" + ] + }, + { + "entities": [ + { + "idPattern": ".*downtown$", + "type": "OffStreetParking" + } + ] + } + ], + "endpoint": "http://my.csource.org:1026", + "location": "{ \"type\": \"Polygon\", \"coordinates\": 
[[[8.686752319335938,49.359122687528746],[8.742027282714844,49.3642654834877],[8.767433166503904,49.398462568451485],[8.768119812011719,49.42750021620163],[8.74305725097656,49.44781634951542],[8.669242858886719,49.43754770762113],[8.63525390625,49.41968407776289],[8.637657165527344,49.3995797187007],[8.663749694824219,49.36851347448498],[8.686752319335938,49.359122687528746]]] }" + } + +情報部分ã«ã¯2ã¤ã®ã‚¨ãƒ³ãƒˆãƒªãŒã‚りã¾ã™ã€‚最åˆã«ã€ãã®ã‚½ãƒ¼ã‚¹ã«ã‚ˆã£ã¦æä¾›ã•れる2ã¤ã®ãƒ—ロパティã¨1ã¤ã®ãƒªãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚·ãƒƒãƒ—ã‚’ +説明ã™ã‚‹2ã¤ã®è¿½åŠ ã‚¨ãƒ³ãƒˆãƒªãŒã‚ã‚‹ã“ã¨ãŒã‚ã‹ã‚Šã¾ã™ã€‚ã¤ã¾ã‚Šã€å±žæ€§ãƒ•ィルターãªã—ã§ type Vehicle をリクエストã™ã‚‹ã‚¯ã‚¨ãƒªã¯ +ã™ã¹ã¦ã“ã®ã‚½ãƒ¼ã‚¹ã«è»¢é€ã•れã€å±žæ€§ãƒ•ィルターãŒã‚ã‚‹å ´åˆã¯ã€ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã•れãŸãƒ—ロパティã¾ãŸã¯ãƒªãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚·ãƒƒãƒ—㌠+一致ã™ã‚‹å ´åˆã«ã®ã¿è»¢é€ã•れã¾ã™ã€‚2番目ã®ã‚¨ãƒ³ãƒˆãƒªã¯ã€ã“ã®ã‚½ãƒ¼ã‚¹ãŒ "downtown" ã§çµ‚ã‚るエンティティ ID ã‚’æŒã¤ +type OffStreetParking ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’æä¾›ã§ãã‚‹ã“ã¨ã‚’æ„味ã—ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/HelloWorld.rst b/scorpio-broker/docs/ja/source/HelloWorld.rst new file mode 100644 index 0000000000000000000000000000000000000000..bc9324aff0591b45567da392f52a4155be0ba0c9 --- /dev/null +++ b/scorpio-broker/docs/ja/source/HelloWorld.rst @@ -0,0 +1,44 @@ +**************** +Hello World ã®ä¾‹ +**************** + +一般的ã«è¨€ãˆã°ã€æ¬¡ã®ã‚ˆã†ãªãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã§ HTTP POST リクエストを *http://localhost:9090/ngsi-ld/v1/entities/* ã«é€ä¿¡ã™ã‚‹ +ã“ã¨ã§ã€Scorpio Broker ã® Hello World プログラムã®ã‚ˆã†ãªã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’作æˆã§ãã¾ã™ã€‚ + +.. code-block:: JSON + + curl localhost:9090/ngsi-ld/v1/entities -s -S -H 'Content-Type: application/json' -d @- + { + "id": "urn:ngsi-ld:testunit:123", + "type": "AirQualityObserved", + "dateObserved": { + "type": "Property", + "value": { + "@type": "DateTime", + "@value": "2018-08-07T12:00:00Z" + } + }, + "NO2": { + "type": "Property", + "value": 22, + "unitCode": "GP", + "accuracy": { + "type": "Property", + "value": 0.95 + } + }, + "refPointOfInterest": { + "type": "Relationship", + "object": "urn:ngsi-ld:PointOfInterest:RZ:MainSquare" + }, + "@context": [ + "https://schema.lab.fiware.org/ld/context", + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" + ] + } + +与ãˆã‚‰ã‚ŒãŸä¾‹ã§ã¯ã€@context ãŒãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã«ã‚ã‚‹ãŸã‚ã€Content-Type header ã‚’ application/ld+json ã«è¨­å®šã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +エンティティをå—ä¿¡ã™ã‚‹ã«ã¯ã€HTTP GET ã‚’é€ä¿¡ã—ã¾ã™ã€‚ + + **http://localhost:9090/ngsi-ld/v1/entities/** diff --git a/scorpio-broker/docs/ja/source/buildScorpio.rst b/scorpio-broker/docs/ja/source/buildScorpio.rst new file mode 100644 index 0000000000000000000000000000000000000000..9d9607a8f10f4474d8a661349d05e9d2d0bb89de --- /dev/null +++ b/scorpio-broker/docs/ja/source/buildScorpio.rst @@ -0,0 +1,497 @@ +************************************** +docker-compose を介ã—㦠Scorpio ã‚’èµ·å‹• +************************************** + +コピーã™ã‚‹ã‚³ãƒžãƒ³ãƒ‰ã‚’é–‹å§‹ +######################## + + +Scorpio ã‚’å§‹ã‚る最も簡å˜ãªæ–¹æ³•ã‚’ãŠæŽ¢ã—ã§ã™ã‹ï¼Ÿã“れã§ã™ã€‚ +:: + + curl https://raw.githubusercontent.com/ScorpioBroker/ScorpioBroker/development/docker-compose-aaio.yml + sudo docker-compose -f docker-compose-aaio.yml up + + +イントロダクション +################## + +Scorpio ã‚’èµ·å‹•ã™ã‚‹æœ€ã‚‚ç°¡å˜ãªæ–¹æ³•ã¯ã€docker-compose を使用ã™ã‚‹ã“ã¨ã§ã™ã€‚dockerhub ã«ä¾å­˜ã™ã‚‹2ã¤ã®ãƒ¡ã‚¤ãƒ³ docker-compose +ファイルをæä¾›ã—ã¾ã™ã€‚docker-compose-aaio.yml ãŠã‚ˆã³ docker-compose-dist.yml ã§ã™ã€‚ã“ã®ãƒ•ァイルをãã®ã¾ã¾ä½¿ç”¨ã—㦠+Scorpio ã‚’èµ·å‹•ã§ãã¾ã™ã€‚分散ãƒãƒªã‚¢ãƒ³ãƒˆã§ Scorpio を実行ã™ã‚‹å ´åˆã¯ã€ä¸Šè¨˜ã®ã‚³ãƒžãƒ³ãƒ‰ã§ yml ファイルを交æ›ã—ã¾ã™ã€‚ + 
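For the distributed variant the same two-step pattern applies, just with the other compose file. A minimal sketch, assuming docker-compose-dist.yml is published at the same raw GitHub location as the aaio file above (the -O flag is added here so curl saves the file locally before docker-compose reads it)::

    # fetch the distributed compose file (URL assumed analogous to the aaio one above)
    curl -O https://raw.githubusercontent.com/ScorpioBroker/ScorpioBroker/development/docker-compose-dist.yml
    # start one container per Scorpio microservice plus Kafka and Postgres
    sudo docker-compose -f docker-compose-dist.yml up
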
+docker-compose-aaio.yml +####################### + +ã“ã“ã§ã® AAIO ã¯ã€ã»ã¼ã™ã¹ã¦ã‚’1ã¤ã«ã¾ã¨ã‚ãŸã‚‚ã®ã§ã™ã€‚ã“ã®ãƒãƒªã‚¢ãƒ³ãƒˆã§ã¯ã€Scorpio ã®ã‚³ã‚¢ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¨ Spring Cloud +コンãƒãƒ¼ãƒãƒ³ãƒˆãŒ1ã¤ã®ã‚³ãƒ³ãƒ†ãƒŠå†…ã§é–‹å§‹ã•れã¾ã™ã€‚追加ã®ã‚³ãƒ³ãƒ†ãƒŠã¯ Kafka 㨠Postgres ã®ã¿ã§ã™ã€‚テストãŠã‚ˆã³å°è¦æ¨¡ã‹ã‚‰ +ä¸­è¦æ¨¡ã®å±•é–‹ã®å ´åˆã€ã“れã¯ãŠãらã使用ã—ãŸã„ã‚‚ã®ã§ã™ã€‚ + +docker-compose-dist.yml +####################### + +ã“ã®ãƒãƒªã‚¢ãƒ³ãƒˆã§ã¯ã€å„ Scorpio コンãƒãƒ¼ãƒãƒ³ãƒˆã¯ç•°ãªã‚‹ã‚³ãƒ³ãƒ†ãƒŠã§é–‹å§‹ã•れã¾ã™ã€‚ã“れã«ã‚ˆã‚Šã€æŸ”軟性ãŒé«˜ããªã‚Šã€å€‹ã€…ã® +コンãƒãƒ¼ãƒãƒ³ãƒˆã‚’ç½®ãæ›ãˆãŸã‚Šã€ä¸€éƒ¨ã®ã‚³ã‚¢ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã®æ–°ã—ã„インスタンスを開始ã—ãŸã‚Šã§ãã¾ã™ã€‚ + +環境変数を介ã—㦠Docker ã‚¤ãƒ¡ãƒ¼ã‚¸ã‚’æ§‹æˆ +###################################### + +Docker ã«ç’°å¢ƒå¤‰æ•°ã‚’入力ã™ã‚‹æ–¹æ³•ã¯è¤‡æ•°ã‚りã¾ã™ã€‚ãれらã™ã¹ã¦ã‚’通éŽã™ã‚‹ã®ã§ã¯ãªãã€docker-compose ファイルã®ã¿ã‚’ +通éŽã—ã¾ã™ã€‚ãŸã ã—ã€Scorpio 関連ã®éƒ¨åˆ†ã¯ã“れらã™ã¹ã¦ã®ãƒãƒªã‚¢ãƒ³ãƒˆã«é©ç”¨ã•れã¾ã™ã€‚Scorpio ã®æ§‹æˆã¯ã€Spring Cloud +æ§‹æˆã‚·ã‚¹ãƒ†ãƒ ã‚’介ã—ã¦è¡Œã‚れã¾ã™ã€‚使用ã•れるパラメータã¨ãƒ‡ãƒ•ォルト値ã®å®Œå…¨ãªæ¦‚è¦ã«ã¤ã„ã¦ã¯ã€AllInOneRunner ã® +application.yml ã‚’å‚ç…§ã—ã¦ãã ã•ã„。 +https://github.com/ScorpioBroker/ScorpioBroker/blob/development/AllInOneRunner/src/main/resources/application-aaio.yml。 +æ–°ã—ã„設定をæä¾›ã™ã‚‹ã«ã¯ã€docker-compose ファイルã®ç’°å¢ƒã‚¨ãƒ³ãƒˆãƒªã‚’介ã—ã¦ãれらをæä¾›ã§ãã¾ã™ã€‚設定ã™ã‚‹å¤‰æ•°ã¯ spring_args +ã¨å‘¼ã°ã‚Œã¾ã™ã€‚ã“ã®ã‚ªãƒ—ション㯠Scorpio コンテナã«ã®ã¿è¨­å®šã™ã‚‹ãŸã‚ã€æ¬¡ã®ã‚ˆã†ã« Scorpio コンテナエントリã®ã‚µãƒ–パーツ㫠+ã—ã¾ã™ã€‚ +:: + + scorpio: + image: scorpiobroker/scorpio:scorpio-aaio_1.0.0 + ports: + - "9090:9090" + depends_on: + - kafka + - postgres + environment: + spring_args: --maxLimit=1000 + +ã“れã«ã‚ˆã‚Šã€ã‚¯ã‚¨ãƒªã®ãƒªãƒ—ãƒ©ã‚¤ã®æœ€å¤§åˆ¶é™ã‚’デフォルトã®500ã§ã¯ãªã1000ã«è¨­å®šã—ã¾ã™ã€‚ + + +Docker ã‚’é™ã‹ã« +############### + +一部㮠Docker コンテナã¯éžå¸¸ã«ãƒŽã‚¤ã‚ºãŒå¤šãã€ãã®ã™ã¹ã¦ã®å‡ºåŠ›ãŒå¿…è¦ã§ã¯ã‚りã¾ã›ã‚“。簡å˜ãªè§£æ±ºç­–ã¯ã“れを追加ã™ã‚‹ã“ã¨ã§ã™ã€‚ +:: + + logging: + driver: none + + +docker-compose ファイルã§ãれãžã‚Œã®ã‚³ãƒ³ãƒ†ãƒŠãƒ¼æ§‹æˆã«è¿½åŠ ã—ã¾ã™ã€‚ 例ãˆã°ã€Kafka ã‚’é™ã‹ã«ã™ã‚‹ãŸã‚ã«ã€‚ +:: + + kafka: + image: wurstmeister/kafka + hostname: kafka + ports: + - "9092" + environment: + KAFKA_ADVERTISED_HOST_NAME: kafka + KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ADVERTISED_PORT: 9092 + KAFKA_LOG_RETENTION_MS: 10000 + KAFKA_LOG_RETENTION_CHECK_INTERVAL_MS: 5000 + volumes: + - /var/run/docker.sock:/var/run/docker.sock + depends_on: + - zookeeper + logging: + driver: none + +************** +æ§‹æˆãƒ‘ラメータ +************** + +Scorpio 㯠Spring Cloud/Boot æ§‹æˆã‚·ã‚¹ãƒ†ãƒ ã‚’使用ã—ã¦ã„ã¾ã™ã€‚ã“れã¯ã€å¯¾å¿œã™ã‚‹ãƒ•ォルダー内㮠application.yml ファイルを +介ã—ã¦è¡Œã‚れã¾ã™ã€‚AllInOneRunner ã«ã¯ã€ä½¿ç”¨å¯èƒ½ãªã™ã¹ã¦ã®æ§‹æˆã‚ªãƒ—ションã®å®Œå…¨ãªã‚»ãƒƒãƒˆãŒå«ã¾ã‚Œã¦ã„ã¾ã™ã€‚ + +ã“れらã¯ã€ä¸Šè¨˜ã®ã‚ˆã†ã«ã‚³ãƒžãƒ³ãƒ‰ãƒ©ã‚¤ãƒ³ã¾ãŸã¯ Docker ã®å ´åˆã«ä¸Šæ›¸ãã§ãã¾ã™ã€‚ + ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| æ§‹æˆã‚ªãƒ—ション | 説明 | デフォルト値 | ++===================+=================================================+================================================================================+ +| atcontext.url | 内部コンテキストサーãƒãƒ¼ã«ä½¿ç”¨ã•れる URL | http://localhost:9090/ngsi-ld/contextes/ | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| bootstrap.servers | 内部 Kafka ã®ãƒ›ã‚¹ãƒˆã¨ãƒãƒ¼ãƒˆ | kafka:9092 (default used for docker) | 
++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| broker.id a | ブローカーã®ä¸€æ„ã® id。フェデレーションã«å¿…è¦ | Broker1 | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| broker.parent. | フェデレーション設定ã§ã®è¦ªãƒ–ローカーã®url | SELF (フェデレーションãªã—ã‚’æ„味ã™ã‚‹) | +| location.url | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| broker. | ã‚«ãƒãƒ¬ãƒƒã‚¸ã® GeoJSON ã®èª¬æ˜Žã€‚フェデレーション | empty | +| geoCoverage | 設定ã§ã®ç™»éŒ²ã«ä½¿ç”¨ã•れã¾ã™ã€‚ | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| defaultLimit | 制é™ãŒæŒ‡å®šã•れã¦ã„ãªã„å ´åˆã®ã‚¯ã‚¨ãƒªã®ãƒ‡ãƒ•ォルト | 50 | +| | ã®åˆ¶é™ | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| maxLimit | クエリã®çµæžœã®æœ€å¤§æ•° | 500 | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| reader.datasource | ã“ã“ã§ postgres ã®è¨­å®šã‚’変更ã™ã‚‹å ´åˆã¯ã€ | ngb | +| .hikari.password | パスワードを設定ã—ã¾ã™ | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| reader.datasource | postgres ã¸ã® JDBC URL | jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_reader | +| .hikari.url | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| reader.datasource | postgres データベースã®ãƒ¦ãƒ¼ã‚¶ãƒ¼å | ngb | +| .hikari.username | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| writer.datasource | ã“ã“ã§ postgres ã®è¨­å®šã‚’変更ã™ã‚‹å ´åˆã¯ã€ | ngb | +| .hikari.password | パスワードを設定ã—ã¾ã™ | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| writer.datasource | postgres ã¸ã® JDBC URL | jdbc:postgresql://postgres:5432/ngb?ApplicationName=ngb_storagemanager_writer | +| .hikari.url | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ +| writer.datasource | postgres データベースã®ãƒ¦ãƒ¼ã‚¶ãƒ¼å | ngb | +| .hikari.username | | | ++-------------------+-------------------------------------------------+--------------------------------------------------------------------------------+ + +************************* +ソースã‹ã‚‰ Scorpio を構築 +************************* + +Scorpio ã¯ã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スフレームワークã¨ã—㦠Spring Cloud を使用ã—ã€ãƒ“ルドツールã¨ã—㦠Apache Maven を使用ã—㦠+Java ã§é–‹ç™ºã•れã¦ã„ã¾ã™ã€‚一部ã®ãƒ†ã‚¹ãƒˆã§ã¯ã€Apache Kafka メッセージãƒã‚¹ã‚’実行ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ (詳細ã«ã¤ã„ã¦ã¯ã€ +セットアップã®ç« ã‚’å‚ç…§ã—ã¦ãã ã•ã„)。ã“れらã®ãƒ†ã‚¹ãƒˆã‚’スキップã—ãŸã„å ´åˆã¯ã€å®Ÿè¡Œ ``mvn clean package -DskipTests`` +ã—ã¦å€‹ã€…ã®ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スを構築ã™ã‚‹ã“ã¨ãŒã§ãã¾ã™ã€‚ + +ビルドã«é–¢ã™ã‚‹ä¸€èˆ¬çš„ãªæ³¨æ„ +########################## + 
+ã“ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã®ã•らã«ä¸‹ã«ã¯ã€ã•ã¾ã–ã¾ãªãƒ•レーãƒãƒ¼ã®æ­£ç¢ºãªãƒ“ルドコマンド/引数ãŒã‚りã¾ã™ã€‚ã“ã®ãƒ‘ートã§ã¯ã€ +ã•ã¾ã–ã¾ãªå¼•æ•°ãŒã©ã®ã‚ˆã†ã«æ©Ÿèƒ½ã™ã‚‹ã‹ã«ã¤ã„ã¦æ¦‚è¦ã‚’説明ã—ã¾ã™ã€‚ + +Maven プロファイル +----------------- + +ç¾åœ¨ã€3ã¤ã®åˆ©ç”¨å¯èƒ½ãª Maven ビルドプロファイルãŒã‚りã¾ã™ã€‚ + +デフォルト +~~~~~~~~~~ + +-P 引数を指定ã—ãªã„å ´åˆã€Maven ã¯ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス㨠AllInOneRunner ã®å€‹åˆ¥ã® jar ファイルを生æˆã—ã€å„ "full" +マイクロサービスãŒãƒ‘ッケージ化ã•れã¾ã™ (ã“れã«ã‚ˆã‚Šã€AllInOneRunner ã®ã‚µã‚¤ã‚ºã¯ç´„500 MBã«ãªã‚Šã¾ã™) + +docker +~~~~~~ + +ã“れã«ã‚ˆã‚Šã€Maven ãŒãƒˆãƒªã‚¬ãƒ¼ã•れã¦å„マイクロサービス㮠Docker ã‚³ãƒ³ãƒ†ãƒŠãŒæ§‹ç¯‰ã•れã¾ã™ã€‚ + +docker-aaio +~~~~~~~~~~~ + +ã“れã«ã‚ˆã‚Šã€Maven ãŒãƒˆãƒªã‚¬ãƒ¼ã•れã€AllInOneRunner 㨠Spring Cloud コンãƒãƒ¼ãƒãƒ³ãƒˆ (eureka, configserver, gateway) +ã‚’å«ã‚€1ã¤ã® Docker ã‚³ãƒ³ãƒ†ãƒŠãŒæ§‹ç¯‰ã•れã¾ã™ã€‚ + +Maven ã®å¼•æ•° +~~~~~~~~~~~~ + +ã“れらã®å¼•æ•°ã¯ã€ã‚³ãƒžãƒ³ãƒ‰ãƒ©ã‚¤ãƒ³ã® -D を介ã—ã¦æä¾›ã•れã¾ã™ã€‚ + +skipTests +~~~~~~~~~ + +ビルドを高速化ã—ãŸã„å ´åˆã€ã¾ãŸã¯ä¸€éƒ¨ã®ãƒ†ã‚¹ãƒˆã§å¿…è¦ãª Kafka +インスタンスを実行ã—ã¦ã„ãªã„å ´åˆã¯ã€ä¸€èˆ¬çš„ã«æŽ¨å¥¨ã•れã¾ã™ã€‚ + +skipDefault +~~~~~~~~~~~ + +ã“れ㯠Scorpio ビルドã®ç‰¹åˆ¥ãªè­°è«–ã§ã™ã€‚ã“ã®å¼•æ•°ã¯ã€å€‹ã€…ã®ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã®ã‚¹ãƒ—リングã®å†ãƒ‘ックを無効ã«ã—ã€ã‚ˆã‚Šå°ã•㪠+AllInOneRunner jar ファイルを許å¯ã—ã¾ã™ã€‚ã“ã®å¼•æ•°ã¯ã€docker-aaio プロファイルã¨çµ„ã¿åˆã‚ã›ã¦ã®ã¿ä½¿ç”¨ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +Spring プロファイル +------------------- + +Spring ã¯ã€jar ファイルã®èµ·å‹•時ã«ã‚¢ã‚¯ãƒ†ã‚£ãƒ–化ã§ãるプロファイルもサãƒãƒ¼ãƒˆã—ã¦ã„ã¾ã™ã€‚ç¾åœ¨ã€Scorpio ã§æ´»ç™ºã«ä½¿ç”¨ã•れ㦠+ã„ã‚‹3ã¤ã®ãƒ—ロファイルãŒã‚りã¾ã™ã€‚デフォルトã®ãƒ—ロファイルã¯ã€ãƒ‡ãƒ•ォルトã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ—ãŒå€‹ã€…ã®ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã§ã‚る㨠+想定ã—ã¦ã„ã¾ã™ã€‚例外㯠AllInOneRunner ã§ã€ãƒ‡ãƒ•ォルトã§ã¯ docker-aaio セットアップã§å®Ÿè¡Œã•れã¦ã„ã‚‹ã¨æƒ³å®šã•れã¦ã„ã¾ã™ã€‚ + +ç¾åœ¨ã€AllInOneRunner ã¨çµ„ã¿åˆã‚ã›ãŸã‚²ãƒ¼ãƒˆã‚¦ã‚§ã‚¤ã‚’除ã„ã¦ã€ãƒ‡ãƒ•ォルトã®ãƒ—ロファイルã§ã™ã¹ã¦ã‚’実行ã§ãã‚‹ã¯ãšã§ã™ã€‚ã“れら +2ã¤ã‚’一緒ã«ä½¿ç”¨ã™ã‚‹ã«ã¯ã€aaio spring プロファイルã§ã‚²ãƒ¼ãƒˆã‚¦ã‚§ã‚¤ã‚’é–‹å§‹ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ã“れã¯ã€ã“れを開始コマンド +-Dspring.profiles.active=aaio ã«ã‚¢ã‚¿ãƒƒãƒã™ã‚‹ã“ã¨ã§å®Ÿè¡Œã§ãã¾ã™ã€‚ + +ã•らã«ã€ä¸€éƒ¨ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã«ã¯ã€é–‹ç™ºç›®çš„ã®ã¿ã‚’目的ã¨ã—ãŸé–‹ç™ºãƒ—ロファイルãŒç”¨æ„ã•れã¦ãŠã‚Šã€ãã®ãŸã‚ã«ã®ã¿ä½¿ç”¨ã™ã‚‹ +å¿…è¦ãŒã‚りã¾ã™ã€‚ + +セットアップ +############ + +Scorpio ã«ã¯2ã¤ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’インストールã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +Postgres +-------- + +`Postgres DB `__ 㨠`Postgis `__ 拡張機能をダウンロードã—ã€Web +ã‚µã‚¤ãƒˆã®æŒ‡ç¤ºã«å¾“ã£ã¦ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ—ã—ã¦ãã ã•ã„。 + +Scorpio ã¯ã€Postgres 10 ã§ãƒ†ã‚¹ãƒˆãŠã‚ˆã³é–‹ç™ºã•れã¦ã„ã¾ã™ã€‚ + +Scorpio ãŒä½¿ç”¨ã™ã‚‹ãƒ‡ãƒ•ォルトã®ãƒ¦ãƒ¼ã‚¶ãƒ¼åã¨ãƒ‘スワード㯠"ngb" ã§ã™ã€‚別ã®ãƒ¦ãƒ¼ã‚¶ãƒ¼åã¾ãŸã¯ãƒ‘スワードを使用ã™ã‚‹å ´åˆã¯ã€ +StorageManager ãŠã‚ˆã³ RegistryManager ã‚’èµ·å‹•ã™ã‚‹ã¨ãã«ãƒ‘ラメーターã¨ã—ã¦ãれらを指定ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚例ãˆã°ã€ + +.. code:: console + + java -jar Storage/StorageManager/target/StorageManager--SNAPSHOT.jar --reader.datasource.username=funkyusername --reader.datasource.password=funkypassword + +ã¾ãŸã¯ + +.. code:: console + + java -jar Registry/RegistryManager/target/RegistryManager--SNAPSHOT.jar --spring.datasource.username=funkyusername --spring.datasource.password=funkypassword + +postgres ã§å¯¾å¿œã™ã‚‹ãƒ¦ãƒ¼ã‚¶ãƒ¼ ("ngb" ã¾ãŸã¯é¸æŠžã—ãŸåˆ¥ã®ãƒ¦ãƒ¼ã‚¶ãƒ¼å) を作æˆã™ã‚‹ã“ã¨ã‚’忘れãªã„ã§ãã ã•ã„。ã“れã¯ã€ +データベース接続ã®ãŸã‚ã« Spring Cloud サービスã«ã‚ˆã£ã¦ä½¿ç”¨ã•れã¾ã™ã€‚ターミナルã«ã„ã‚‹é–“ã«ã€postgres ユーザーã¨ã—㦠psql +コンソールã«ãƒ­ã‚°ã‚¤ãƒ³ã—ã¾ã™: + +.. code:: console + + sudo -u postgres psql + +次ã«ã€ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ "ngb" を作æˆã—ã¾ã™: + +.. code:: console + + postgres=# create database ngb; + +ユーザー "ngb" を作æˆã—ã€ã‚¹ãƒ¼ãƒ‘ーユーザーã«ã—ã¾ã™: + +.. 
code:: console + + postgres=# create user ngb with encrypted password 'ngb'; + postgres=# alter user ngb with superuser; + +データベースã«å¯¾ã™ã‚‹ç‰¹æ¨©ã‚’付与ã—ã¾ã™: + +.. code:: console + + postgres=# grant all privileges on database ngb to ngb; + +ã¾ãŸã€Postgis 拡張機能用ã®ç‹¬è‡ªã®ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹/スキーマを作æˆã—ã¾ã™: + +.. code:: console + + postgres=# CREATE DATABASE gisdb; + postgres=# \connect gisdb; + postgres=# CREATE SCHEMA postgis; + postgres=# ALTER DATABASE gisdb SET search_path=public, postgis, contrib; + postgres=# \connect gisdb; + postgres=# CREATE EXTENSION postgis SCHEMA postgis; + +Apache Kafka +------------ + +Scorpio ã¯ã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス間ã®é€šä¿¡ã®ãŸã‚ã«ã€`Apache Kafka `__ を使用ã—ã¾ã™ã€‚ + +Scorpio ã¯ã€Kafka version 2.12-2.1.0 ã§ãƒ†ã‚¹ãƒˆãŠã‚ˆã³é–‹ç™ºã•れã¦ã„ã¾ã™ã€‚ + +`Apache Kafka `__ をダウンロードã—ã€Web ã‚µã‚¤ãƒˆã®æŒ‡ç¤ºã«å¾“ã£ã¦ãã ã•ã„。 + +Kafka ã‚’é–‹å§‹ã™ã‚‹ã«ã¯ã€æ¬¡ã®2ã¤ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’é–‹å§‹ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +zookeeper を開始〠+ +.. code:: console + + /bin/[Windows]/zookeeper-server-start.[bat|sh] /config/zookeeper.properties + +Kafka server を開始〠+ +.. code:: console + + /bin/[Windows]/kafka-server-start.[bat|sh] /config/server.properties + +詳細ã«ã¤ã„ã¦ã¯ã€Kafka ã® `Webサイト `__ ã‚’ã”覧ãã ã•ㄠ。 + +Docker コンテナã®å–å¾— +~~~~~~~~~~~~~~~~~~~~~ + +ç¾åœ¨ã® Maven ビルドã¯ã€Maven プロファイルを使用ã—ã¦ãƒ“ルドã‹ã‚‰ãƒˆãƒªã‚¬ãƒ¼ã™ã‚‹2種類㮠Docker コンテナ生æˆã‚’サãƒãƒ¼ãƒˆã—㦠+ã„ã¾ã™ã€‚ + +最åˆã®ãƒ—ロファイル㯠'docker' ã¨å‘¼ã°ã‚Œã€æ¬¡ã®ã‚ˆã†ã«å‘¼ã³å‡ºã™ã“ã¨ãŒã§ãã¾ã™ã€‚ + +.. code:: console + + sudo mvn clean package -DskipTests -Pdocker + +ã“れã«ã‚ˆã‚Šã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã”ã¨ã«å€‹åˆ¥ã® Docker コンテナãŒç”Ÿæˆã•れã¾ã™ã€‚対応ã™ã‚‹ docker-compose ファイル㯠+``docker-compose-dist.yml`` ã§ã™ã€‚ + +2番目ã®ãƒ—ロファイル㯠'docker-aaio' ã¨å‘¼ã°ã‚Œã¾ã™ (ã»ã¼ã™ã¹ã¦ãŒ1ã¤ã«ãªã£ã¦ã„ã¾ã™)。ã“れã«ã‚ˆã‚Šã€Kafka メッセージãƒã‚¹ã¨ +postgres データベースを除ãブローカーã®ã™ã¹ã¦ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã«å¯¾ã—ã¦å˜ä¸€ã® Docker コンテナãŒç”Ÿæˆã•れã¾ã™ã€‚ + +aaio ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã‚’å–å¾—ã™ã‚‹ã«ã¯ã€æ¬¡ã®ã‚ˆã†ã« Maven ビルドを実行ã—ã¾ã™ã€‚ + +.. code:: console + + sudo mvn clean package -DskipTests -DskipDefault -Pdocker-aaio + +対応ã™ã‚‹ docker-compose ファイル㯠``docker-compose-aaio.yml`` ã§ã™ã€‚ + +Docker コンテナã®èµ·å‹• +~~~~~~~~~~~~~~~~~~~~~ + +Docker コンテナを起動ã™ã‚‹ã«ã¯ã€å¯¾å¿œã™ã‚‹ docker-compose ファイルを使用ã—ã¦ãã ã•ã„。ã¤ã¾ã‚Šã€ + +.. code:: console + + sudo docker-composer -f docker-compose-aaio.yml up + +コンテナをé©åˆ‡ã«åœæ­¢ã™ã‚‹ã«ã¯ã€ + +.. 
code:: console + + sudo docker-composer -f docker-compose-aaio.yml down + +Kafka docker イメージ㨠docker-compose ã«é–¢ã™ã‚‹ä¸€èˆ¬çš„ãªæ³¨æ„ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Kafka docker コンテナーã§ã¯ã€ç’°å¢ƒå¤‰æ•° ``KAFKA_ADVERTISED_HOST_NAME`` を指定ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ ã“れã¯ã€docker-compose +ファイル㧠Docker ホスト IP ã¨ä¸€è‡´ã™ã‚‹ã‚ˆã†ã«å¤‰æ›´ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚``127.0.0.1`` を使用ã§ãã¾ã™ãŒã€ã“れã«ã‚ˆã‚Šã€Kafka +をクラスターモード㧠実行ã§ããªããªã‚Šã¾ã™ã€‚ + +詳細ã«ã¤ã„ã¦ã¯ã€https://hub.docker.com/r/wurstmeister/kafka ã‚’å‚ç…§ã—ã¦ãã ã•ã„。 + +Maven ã®å¤–部㧠Docker ビルドを実行 +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +jars ã®ãƒ“ルドを Docker ビルドã‹ã‚‰åˆ†é›¢ã—ãŸã„å ´åˆã¯ã€ç‰¹å®šã® VARS ã‚’ Docker ã«æä¾›ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚次ã®ãƒªã‚¹ãƒˆã¯ã€ +ルートディレクトリã‹ã‚‰ docker build を実行ã—ãŸå ´åˆã®ã€ã™ã¹ã¦ã®å¤‰æ•°ã¨ãã®æ„図ã•れãŸå€¤ã‚’示ã—ã¦ã„ã¾ã™ã€‚ + +- ``BUILD_DIR_ACS = Core/AtContextServer`` + +- ``BUILD_DIR_SCS = SpringCloudModules/config-server`` + +- ``BUILD_DIR_SES = SpringCloudModules/eureka`` + +- ``BUILD_DIR_SGW = SpringCloudModules/gateway`` + +- ``BUILD_DIR_HMG = History/HistoryManager`` + +- ``BUILD_DIR_QMG = Core/QueryManager`` + +- ``BUILD_DIR_RMG = Registry/RegistryManager`` + +- ``BUILD_DIR_EMG = Core/EntityManager`` + +- ``BUILD_DIR_STRMG = Storage/StorageManager`` + +- ``BUILD_DIR_SUBMG = Core/SubscriptionManager`` + +- ``JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SCS = config-server-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SGW = gateway-${project.version}.jar`` + +- ``JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar`` + +- ``JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar`` + +- ``JAR_FILE_RUN_ACS = AtContextServer.jar`` + +- ``JAR_FILE_RUN_SCS = config-server.jar`` + +- ``JAR_FILE_RUN_SES = eureka-server.jar`` + +- ``JAR_FILE_RUN_SGW = gateway.jar`` + +- ``JAR_FILE_RUN_HMG = HistoryManager.jar`` + +- ``JAR_FILE_RUN_QMG = QueryManager.jar`` + +- ``JAR_FILE_RUN_RMG = RegistryManager.jar`` + +- ``JAR_FILE_RUN_EMG = EntityManager.jar`` + +- ``JAR_FILE_RUN_STRMG = StorageManager.jar`` + +- ``JAR_FILE_RUN_SUBMG = SubscriptionManager.jar`` + +コンãƒãƒ¼ãƒãƒ³ãƒˆã®èµ·å‹• +#################### + +ビルド後ã€å€‹ã€…ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’通常㮠Jar ファイルã¨ã—ã¦é–‹å§‹ã—ã¾ã™ã€‚以下を実行ã—㦠Spring Cloud サービスを開始ã—ã¾ã™ã€‚ + +.. code:: console + + java -jar SpringCloudModules/eureka/target/eureka-server--SNAPSHOT.jar + java -jar SpringCloudModules/gateway/target/gateway--SNAPSHOT.jar + java -jar SpringCloudModules/config-server/target/config-server--SNAPSHOT.jar + +ブローカーコンãƒãƒ¼ãƒãƒ³ãƒˆã‚’é–‹å§‹ã—ã¾ã™ã€‚ + +.. 
code:: console + + java -jar Storage/StorageManager/target/StorageManager--SNAPSHOT.jar + java -jar Core/QueryManager/target/QueryManager--SNAPSHOT.jar + java -jar Registry/RegistryManager/target/RegistryManager--SNAPSHOT.jar + java -jar Core/EntityManager/target/EntityManager--SNAPSHOT.jar + java -jar History/HistoryManager/target/HistoryManager--SNAPSHOT.jar + java -jar Core/SubscriptionManager/target/SubscriptionManager--SNAPSHOT.jar + java -jar Core/AtContextServer/target/AtContextServer--SNAPSHOT.jar + +æ§‹æˆã®å¤‰æ›´ +---------- + +æ§‹æˆå¯èƒ½ãªã™ã¹ã¦ã®ã‚ªãƒ—ションã¯ã€application.properties ファイルã«ã‚りã¾ã™ã€‚ãれらを変更ã™ã‚‹ã«ã¯ã€2ã¤ã®ã‚ªãƒ—ション㌠+ã‚りã¾ã™ã€‚ビルドå‰ã«ãƒ—ロパティを変更ã™ã‚‹ã‹ã€``--=` を追加ã™ã‚‹ã“ã¨ã§ã‚³ãƒ³ãƒ•ィグを +上書ãã™ã‚‹ã‹ã€ãªã© + +.. code:: console + + java -jar Storage/StorageManager/target/StorageManager--SNAPSHOT.jar --reader.datasource.username=funkyusername --reader.datasource.password=funkypassword` + +CORS サãƒãƒ¼ãƒˆã‚’有効化 +--------------------- + +ã“れらã®è¨­å®šã‚ªãƒ—ションをæä¾›ã™ã‚‹ã“ã¨ã«ã‚ˆã‚Šã€ã‚²ãƒ¼ãƒˆã‚¦ã‚§ã‚¤ã§ cors サãƒãƒ¼ãƒˆã‚’有効ã«ã§ãã¾ã™ã€‚- gateway.enablecors - +デフォルト㯠False ã§ã™ã€‚ä¸€èˆ¬çš„ãªæœ‰åŠ¹åŒ–ã®å ´åˆã¯ true ã«è¨­å®šã—ã¾ã™ - gateway.enablecors.allowall - デフォルト㯠False +ã§ã™ã€‚ã™ã¹ã¦ã®ã‚ªãƒªã‚¸ãƒ³ã‹ã‚‰ã® CORS を有効ã«ã—ã€ã™ã¹ã¦ã®ãƒ˜ãƒƒãƒ€ãƒ¼ã¨ã™ã¹ã¦ã®ãƒ¡ã‚½ãƒƒãƒ‰ã‚’許å¯ã™ã‚‹ã«ã¯ã€true ã«è¨­å®šã—ã¾ã™ã€‚ +安全ã§ã¯ã‚りã¾ã›ã‚“ãŒã€ä¾ç„¶ã¨ã—ã¦éžå¸¸ã«é »ç¹ã«ä½¿ç”¨ã•れã¾ã™ã€‚-gateway.enablecors.allowedorigin - 許å¯ã•れãŸã‚ªãƒªã‚¸ãƒ³ã® +コンマ区切りリスト -gateway.enablecors.allowedheader - 許å¯ã•れãŸãƒ˜ãƒƒãƒ€ãƒ¼ã®ã‚³ãƒ³ãƒžåŒºåˆ‡ã‚Šãƒªã‚¹ +-gateway.enablecors.allowedmethods - 許å¯ã•れãŸãƒ¡ã‚½ãƒƒãƒ‰ã®ã‚³ãƒ³ãƒžåŒºåˆ‡ã‚Šãƒªã‚¹ãƒˆ -gateway.enablecors.allowallmethods- +デフォルト㯠False ã§ã™ã€‚ã™ã¹ã¦ã®ãƒ¡ã‚½ãƒƒãƒ‰ã‚’許å¯ã™ã‚‹ã«ã¯ã€true ã«è¨­å®šã—ã¾ã™ã€‚true ã«è¨­å®šã™ã‚‹ã¨ã€allowmethods +エントリãŒä¸Šæ›¸ãã•れã¾ã™ã€‚ + +トラブルシューティング +###################### + +Missing JAXB dependencies +------------------------- + +eureka-server ã‚’èµ·å‹•ã™ã‚‹ã¨ã€ **java.lang.TypeNotPresentException: Type javax.xml.bind.JAXBContext not present** 例外㌠+発生ã™ã‚‹å ´åˆãŒã‚りã¾ã™ã€‚ ãã®å ´åˆã€ãƒžã‚·ãƒ³ã§ Java 11 を実行ã—ã¦ã„ã‚‹å¯èƒ½æ€§ãŒéžå¸¸ã«é«˜ããªã‚Šã¾ã™ã€‚Java 9 パッケージ以é™ã€ +``javax.xml.bind`` ã¯éžæŽ¨å¥¨ã¨ã—ã¦ãƒžãƒ¼ã‚¯ã•れã€Java 11 ã§æœ€çµ‚çš„ã«å®Œå…¨ã«å‰Šé™¤ã•れã¾ã—ãŸã€‚ + +ã“ã®å•題を修正ã—㦠eureka-server を実行ã™ã‚‹ã«ã¯ã€é–‹å§‹ã™ã‚‹å‰ã«ã€ä»¥ä¸‹ã® JAXB Maven ä¾å­˜é–¢ä¿‚ã‚’ +``ScorpioBroker/SpringCloudModules/eureka/pom.xml`` ã«æ‰‹å‹•ã§è¿½åŠ ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +.. code:: xml + + ... + + ... + + com.sun.xml.bind + jaxb-core + 2.3.0.1 + + + javax.xml.bind + jaxb-api + 2.3.1 + + + com.sun.xml.bind + jaxb-impl + 2.3.1 + + ... + + ... + +ã“れã¯ã€æ¡ä»¶ä»˜ãä¾å­˜é–¢ä¿‚を使用ã—ã¦ä¿®æ­£ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/callFlow.rst b/scorpio-broker/docs/ja/source/callFlow.rst new file mode 100644 index 0000000000000000000000000000000000000000..71918f3b4f208256c908abc3bbd7a7d5ed9f3f98 --- /dev/null +++ b/scorpio-broker/docs/ja/source/callFlow.rst @@ -0,0 +1,286 @@ +******************** +オペレーションフロー +******************** + +エンティティã®ä½œæˆ/æ›´æ–°/追加 +############################ + +.. figure:: ../../en/source/figures/flow-1.png + +ã“ã®å›³ã¯ã€Scorpio Broker システムã§ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®ä½œæˆ/æ›´æ–°/追加 (create/update/append) ã®æ“作フローを示ã—ã¦ã„ã¾ã™ã€‚ +マークã•れãŸã‚¹ãƒ†ãƒƒãƒ—ã®è§£é‡ˆã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™ã€‚ + +1. アプリケーションã¯ã€NGSI-LD 準拠ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイス (Service API gateway ã«ã‚ˆã£ã¦å…¬é–‹) を呼ã³å‡ºã—ã¦ã€HTTP POST + リクエストã®å½¢å¼ã§ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’作æˆ/æ›´æ–°/追加ã—ã¾ã™ã€‚ + +2. リクエスト㯠Service API gateway ã«å…¥ã‚Šã¾ã™ã€‚ + +2.1. 
Service API gateway ã¯ã€Discovery & registry service ã‹ã‚‰ã€å®Ÿéš›ã«ã‚µãƒ¼ãƒ“スをæä¾›ã—ã¦ã„るマイクロサービス +エンドãƒã‚¤ãƒ³ãƒˆ (ç€ä¿¡è¦æ±‚を転é€ã™ã‚‹å¿…è¦ãŒã‚る場所) を検出ã—ã¾ã™ã€‚ + +2.2. Service API gateway ã¯ã€HTTP リクエストを Entity Manager マイクロサービスã«è»¢é€ã—ã¾ã™ã€‚ + +3. Entity Manager ã¯ã€å†…部㧠LDContext resolver service を呼ã³å‡ºã—ã¦ã€POST リクエストã¨ã¨ã‚‚ã«é€ä¿¡ã•れãŸç‰¹å®šã® + コンテキストã§ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’解決ã—ã¾ã™ã€‚ペイロードãŒã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã§è§£æ±ºã•れるã¨ã€ãƒˆãƒ”ック “Entities†ã‹ã‚‰ä»¥å‰ã« + ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿/エンティティをフェッãƒã—ã€EntityID ã«åŸºã¥ã„ã¦æ—¢å­˜ã®ä¿å­˜ã•れãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã«å¯¾ã—ã¦è¦æ±‚ã•れ㟠+ エンティティを検証ã—ã¾ã™ã€‚ + +- エンティティãŒã™ã§ã«å­˜åœ¨ã™ã‚‹å ´åˆ (ã¾ãŸã¯å¤‰æ›´ãŒè¦æ±‚ã•れã¦ã„ã‚‹ã™ã¹ã¦ã®å±žæ€§ã¨å€¤ã‚’å«ã‚€å ´åˆ)ã€ã‚¨ãƒ©ãƒ¼ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ + (“already existsâ€) ãŒåŒã˜ã‚‚ã®ã«å¯¾ã—ã¦å¿œç­”ã•れã€ãれ以上ã®ã‚¹ãƒ†ãƒƒãƒ—ã¯å®Ÿè¡Œã•れã¾ã›ã‚“。 + +- ãれ以外ã®å ´åˆã¯ã€ã•らã«å‡¦ç†ã™ã‚‹ãŸã‚ã«ç§»å‹•ã—ã¾ã™ã€‚ + +4. Entity Manager (EM) ã¯ã€æ¬¡ã®ã‚ˆã†ã«ã€è¦æ±‚ã•れãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ (E1) ä½œæˆæ“作ã®å…¬é–‹/ä¿å­˜ã‚’行ã„ã€è¦æ±‚者ã«å¿œç­”ã‚’é€ä¿¡ã—ã¾ã™ã€‚ + +4.1. EM ã¯ã€ãƒˆãƒ”ック “Entities†ã®ä¸‹ã§ Kafka ã« E1 をパブリッシュã—ã¾ã™ã€‚ + +4.2. EM ã¯ã€ãƒˆãƒ”ック “Entity_Create/Update/Append†ã®ä¸‹ã® Kafka ã§ã‚‚ E1 をパブリッシュã—ã¦ã„ã¾ã™ã€‚ + +4.3. ãƒ‘ãƒ–ãƒªãƒƒã‚·ãƒ¥ã®æ“ä½œãŒæˆåŠŸã™ã‚‹ã¨ã€EM ã¯å¿œç­”ã‚’é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +**注æ„**: “Entities†トピックã¯ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®ä½œæˆ/æ›´æ–°/追加æ“作ã®ã„ãšã‚Œã‹ã«ã‚ˆã£ã¦ä¸€å®šæœŸé–“ã«è¡Œã‚れãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã® +ã™ã¹ã¦ã®å¤‰æ›´ã‚’ä¿å­˜ã—ã¾ã™ã€‚ãŸã ã—ã€â€œEntity_Create/Update/Append†トピック (CREATE æ“作ã«å›ºæœ‰) ã¯ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ä½œæˆæ“作㮠+データ変更ã®ã¿ã‚’ä¿å­˜ã—ã¾ã™ã€‚æ“作ã”ã¨ã«ç•°ãªã‚‹ãƒˆãƒ”ックをæŒã¤ã“ã¨ã§ã€ç•°ãªã‚‹æ¶ˆè²»è€…é–“ã®ç•°ãªã‚‹è¦ä»¶é–“ã®ã‚ã„ã¾ã„ãªçжæ³ã‚’ +回é¿ã§ãã¾ã™ã€‚ãŸã¨ãˆã°ã€ã‚µãƒ–スクリプションマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£å…¨ä½“ã€ç‰¹å®šã®å±žæ€§ã®ã‚»ãƒƒãƒˆã‚’サブスクライブã™ã‚‹å¿…è¦ãŒ +ã‚ã‚‹å ´åˆã‚„ã€ç‰¹å®šã®å±žæ€§ã®å€¤ã®å¤‰æ›´ã§ã‚ã‚‹å ´åˆãŒã‚りã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€æ“作ã”ã¨ã«å€‹åˆ¥ã®ãƒˆãƒ”ックãŒç¶­æŒã•れã¦ã„ãªã„å ´åˆã€ +ã“れらã™ã¹ã¦ã®è¦ä»¶ã‚’管ç†ã™ã‚‹ã“ã¨ã¯å›°é›£ã§ã‚ã‚Šã€æ“作ã”ã¨ã«å€‹åˆ¥ã®ãƒˆãƒ”ックãŒç¶­æŒã•れã¦ã„ã‚‹å ´åˆã€ä»»æ„ã®æ™‚点ã§ç‰¹å®šã® +エンティティã®ãƒ‡ãƒ¼ã‚¿ã«ç›´æŽ¥ãƒ‡ãƒ«ã‚¿å¤‰æ›´ã‚’ æä¾›ã™ã‚‹ãŸã‚ã«éžå¸¸ã«å˜ç´”化ã•れã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€ã™ã¹ã¦ã®é‹ç”¨ãƒ‡ãƒ¼ã‚¿ã‚’1ã¤ã®ãƒˆãƒ”ック +ã«ã¾ã¨ã‚ã¦ã‚‚ã€é‹ç”¨ã€ãƒ‡ãƒ¼ã‚¿ã€ã¾ãŸã¯ãƒ‡ãƒ«ã‚¿ãƒ‡ãƒ¼ã‚¿ãƒ¬ãƒ™ãƒ«ã®è¦ä»¶ã§ã‚µãƒ–スクライブ/管ç†ã™ã‚‹ãŸã‚ã«å¿…è¦ãªãƒ‡ã‚«ãƒƒãƒ—リングã€ç°¡ç´ åŒ–〠+ãŠã‚ˆã³æŸ”軟性をæä¾›ã™ã‚‹ã“ã¨ã¯ã§ãã¾ã›ã‚“。ãã®ãŸã‚ã€æ“作ã”ã¨ã«å€‹åˆ¥ã®ãƒˆãƒ”ックを作æˆã—ã€(一定期間ã«ã‚ãŸã‚‹ã™ã¹ã¦ã®æ“作㮠+エンティティ全体ã®å¤‰æ›´ã‚’検証ã™ã‚‹å¿…è¦ãŒã‚ã‚‹) 特定ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã«å¯¾ã™ã‚‹ã™ã¹ã¦ã®æ“作ã®ã™ã¹ã¦ã®å¤‰æ›´ を記録ã™ã‚‹ãŸã‚ã®1ã¤ã® +共通トピックを作æˆã™ã‚‹ã“ã¨ãŒè¨­è¨ˆä¸Šã®å¥½ã¾ã—ã„é¸æŠžã§ã™ã€‚指定ã•れãŸãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã¯ã€LDContext resolver service +ã«ã‚ˆã£ã¦ã€AtContext ã¨ã„ã†åå‰ã§ Kafka ãƒˆãƒ”ãƒƒã‚¯ã«æ ¼ç´ã•れã¦ã„ã¾ã™ã€‚ + +5. メッセージ㌠Kafka トピックã«å…¬é–‹ã•れるã¨ã€ãã®ãƒˆãƒ”ックã®ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ã¯ã€ãれらã®ãƒˆãƒ”ックをサブスクライブã¾ãŸã¯ + リッスンã—ãŸãƒ¦ãƒ¼ã‚¶ãƒ¼ã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã•れã¾ã™ã€‚ã“ã®å ´åˆã€ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ã‘å–ã£ãŸ + “Entity Create/Update/Append†トピックã®ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ã¯ã€æ¬¡ã®ã“ã¨ã‚’行ã„ã¾ã™: + +5.1. サブスクリプションマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€é–¢é€£ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ã‘å–ã‚‹ã¨ã€ç¾åœ¨ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã® +ノーティフィケーション検証をãƒã‚§ãƒƒã‚¯ã—ã€ãれã«å¿œã˜ã¦ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをé€ä¿¡ã™ã‚‹å¿…è¦ãŒã‚ã‚‹ã‹ã©ã†ã‹ã‚’ãƒã‚§ãƒƒã‚¯ã—ã¾ã™ã€‚ + +5.2. Storage Manager ã¯ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã¨ CR トピックã‹ã‚‰ã®ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã«å¿œã˜ã¦ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£é–¢é€£ã®å¤‰æ›´ã‚’ DB +ãƒ†ãƒ¼ãƒ–ãƒ«ã«æ ¼ç´/変更ã™ã‚‹ãŸã‚ã®è¿½åŠ ã®æ“作をトリガーã—ã¾ã™ã€‚ + +6. 
ã“れã§ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãƒžãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€Context Registry ã¸ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãƒ‡ãƒ¼ã‚¿ãƒ¢ãƒ‡ãƒ«ã®ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã®æº–備もã—ã¾ã™ã€‚ + 以下ã¯ã€åŒã˜ã“ã¨ã‚’锿ˆã™ã‚‹ãŸã‚ã«å®Ÿè¡Œã™ã‚‹ãã®ä»–ã®æ©Ÿèƒ½ã§ã™: + +6.1. ãã®ãŸã‚ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‹ã‚‰ Csource registration ペイロード (NGSI_LD 仕様セクション C.3 ã«ã‚ˆã‚‹ï¼‰ã‚’準備ã—〠+å¿…è¦ãªãƒ•ィールド (ID, ブローカー IP ã¨ã—ã¦ã®ã‚¨ãƒ³ãƒ‰ãƒã‚¤ãƒ³ãƒˆã€ãƒ­ã‚±ãƒ¼ã‚·ãƒ§ãƒ³ãªã©) ã«å…¥åŠ›ã—ã¾ã™ã€‚ãã®å¾Œã€ +エンティティマãƒãƒ¼ã‚¸ãƒ£ã¯ã€ã“ã®ä½œæˆã•れ㟠Csource ペイロードを CR ãƒˆãƒ”ãƒƒã‚¯ã«æ›¸ãè¾¼ã¿ã¾ã™ã€‚ + +6.2. CR Manager ã¯ã€ã“ã® CR トピックをリッスンã—ã€ä¸€éƒ¨ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãŒãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã•れãŸã“ã¨ã‚’知るã“ã¨ãŒã§ãã¾ã™ã€‚ + +6.3. CR Manager ã¯ã€æ›´æ–°ãŒã‚ã‚‹å ´åˆã¯ã€Csource ãƒˆãƒ”ãƒƒã‚¯ã«æ›¸ãè¾¼ã¿ã¾ã™ã€‚ + +エンティティã®ã‚µãƒ–スクリプション +################################ + +.. figure:: ../../en/source/figures/flow-2.png + +ã“ã®å›³ã¯ã€ScorpioBroker システムã§ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚µãƒ–スクリプションã®é‹ç”¨ãƒ•ローを示ã—ã¦ã„ã¾ã™ã€‚マークã•れãŸã‚¹ãƒ†ãƒƒãƒ—ã® +è§£é‡ˆã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™ã€‚ + +1. アプリケーションã¯ã€(Service API gateway ã«ã‚ˆã£ã¦å…¬é–‹ã•れる) NGSI-LD 準拠ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイスを呼ã³å‡ºã—ã¦ã€HTTP POST + è¦æ±‚ã®å½¢å¼ã§ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ (ã¾ãŸã¯å±žæ€§) をサブスクライブã—ã¾ã™ã€‚ + +2. リクエスト㯠Service API gateway ã«å…¥ã‚Šã¾ã™ã€‚ + +2.1. Service API gateway ã¯ã€Discovery & registry service ã‹ã‚‰ã€å®Ÿéš›ã«ã‚µãƒ¼ãƒ“スをæä¾›ã—ã¦ã„るマイクロサービス +エンドãƒã‚¤ãƒ³ãƒˆ (ç€ä¿¡è¦æ±‚を転é€ã™ã‚‹å¿…è¦ãŒã‚る場所) を検出ã—ã¾ã™ã€‚ + +2.2. Service API gateway ã¯ã€HTTP リクエストを Subscription Manager マイクロサービスã«è»¢é€ã—ã¾ã™ã€‚ + +3. Subscription Manager ã¯ã€å†…部㧠LDContext resolver service を呼ã³å‡ºã—ã¦ã€POST リクエストã¨ã¨ã‚‚ã«é€ä¿¡ã•れãŸç‰¹å®šã® + コンテキストã§ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’解決ã—ã¾ã™ã€‚次ã«ã€ã‚µãƒ–スクリプションマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€ãƒˆãƒ”ック “Subscription†ã‹ã‚‰ä»¥å‰ã« + ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿/エンティティをフェッãƒã—ã€EntityID ã«åŸºã¥ã„ã¦æ—¢å­˜ã®ä¿å­˜ã•れãŸå€¤ã«å¯¾ã—ã¦è¦æ±‚ã•れãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’ + 検証ã—ã¾ã™ã€‚ + +- ç¾åœ¨ã®ãƒªã‚¯ã‚¨ã‚¹ãƒˆã®ãƒ‡ãƒ¼ã‚¿ãŒã™ã§ã«å­˜åœ¨ã™ã‚‹å ´åˆã¯ã€åŒã˜ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã«å¯¾ã—ã¦ã‚¨ãƒ©ãƒ¼ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ãŒå¿œç­”ã•れã€ãれ以上㮠+ ステップã¯å®Ÿè¡Œã•れã¾ã›ã‚“。 + +- ãれ以外ã®å ´åˆã¯ã€ã•らã«å‡¦ç†ã™ã‚‹ãŸã‚ã«ç§»å‹•ã—ã¾ã™ã€‚ + +4. Subscription Manager (SM) ã¯ã€æ¬¡ã®ã‚ˆã†ã«è¦æ±‚ã•ã‚ŒãŸæ“作ã«å¯¾ã™ã‚‹å¿œç­”をパブリッシュ/ストアã—ã€è¦æ±‚者ã«é€ä¿¡ã—ã¾ã™ã€‚ + +4.1. SM ã¯ãƒˆãƒ”ック “Subscription†ã®ä¸‹ã§ã‚µãƒ–スクリプション S(E1) ã‚’ Kafka ã«å…¬é–‹ã—ã¾ã™ã€‚ + +4.2. SM ã¯ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーション機能を開始ã—ã€é–¢é€£ã™ã‚‹ã‚µãƒ–スクリプションã®ãƒªãƒƒã‚¹ãƒ³ã‚’スタート/キープã—ã¾ã™ã€‚ + +4.2.1. エンティティ関連ã®ãƒˆãƒ”ック “Create/Update/Append†+ +4.2.2. コンテキストソース関連ã®ãƒˆãƒ”ックã€ã¤ã¾ã‚Šã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ã®å°†æ¥ã®ç™»éŒ²ã®ãŸã‚㮠“CSource†トピック。ã“れを +行ã†ã“ã¨ã§ã€ã™ã§ã«ã‚µãƒ–スクライブã•れã¦ã„るアイテム/エンティティ㮠Csource ã‚’ CR ã«æ˜Žç¤ºçš„ã«ã‚¯ã‚¨ãƒªã™ã‚‹å¿…è¦ãŒãªããªã‚Šã¾ã™ã€‚ + +4.2.3. 発生ã—ãŸç‰¹å®šã®ã‚¯ã‚¨ãƒªã®çµæžœã‚’åŽé›†ã™ã‚‹ãŸã‚ã® CRQueryResult トピック (存在ã™ã‚‹å ´åˆ)。 + +4.2.4. ã‚µãƒ–ã‚¹ã‚¯ãƒªãƒ—ã‚·ãƒ§ãƒ³è¦æ±‚ã®ã‚µãƒ–スクリプションæ¡ä»¶ãŒæˆåŠŸã™ã‚‹ã¨ã€SM ã¯ã‚µãƒ–スクライブã•れãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã«æŒ‡å®šã•れ㟠+エンドãƒã‚¤ãƒ³ãƒˆã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã—ã¾ã™ã€‚ã¾ãŸã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ¬ã‚¸ã‚¹ãƒˆãƒªã«ã‚ˆã£ã¦æä¾›ã•れるコンテキストソースã¸ã® +リモートサブスクリプションも実行ã—ã¾ã™ã€‚ + +4.3. Pub ã®æ“ä½œãŒæˆåŠŸã™ã‚‹ã¨ã€SM ã¯å¿œç­”ã‚’é€ã‚Šè¿”ã—ã¾ã™ + +5. SM ã¯ã€ã‚ªãƒ—ションã§ã€å—ä¿¡ã—ãŸã‚µãƒ–ã‚¹ã‚¯ãƒªãƒ—ã‚·ãƒ§ãƒ³è¦æ±‚ã”ã¨ã« CRQuery ãƒˆãƒ”ãƒƒã‚¯ã«æŠ•ç¨¿ã™ã‚‹ã“ã¨ã§ã‚¯ã‚¨ãƒªã‚’ CR ã«ä¸Šã’ã‚‹ã“ã¨ãŒ +ã§ãã¾ã™ (ã‚µãƒ–ã‚¹ã‚¯ãƒªãƒ—ã‚·ãƒ§ãƒ³è¦æ±‚ã”ã¨ã«1回ã®ã¿)。メッセージ㌠CRQuery トピックã«å…¬é–‹ã•れるã¨ã€ã“ã®ãƒˆãƒ”ックを +サブスクライブã¾ãŸã¯ãƒªãƒƒã‚¹ãƒ³ã—ãŸãƒ¦ãƒ¼ã‚¶ãƒ¼ã«ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ CR ã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã•れã¾ã™ã€‚ã“れã§ã€CR +ã¯æ¬¡ã®ã“ã¨ã‚’行ã„ã¾ã™ã€‚ + +5.1. 
CR ã¯ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ä¿¡ã—ã€ã“ã®ã‚µãƒ–ã‚¹ã‚¯ãƒªãƒ—ã‚·ãƒ§ãƒ³ãŒæœ‰åйã§ã‚ã‚‹å¯èƒ½æ€§ã®ã‚ã‚‹ CR トピックãŠã‚ˆã³/ã¾ãŸã¯ +CsourceSub トピックã‹ã‚‰ãƒ‡ãƒ¼ã‚¿ã‚’プルã™ã‚‹ã“ã¨ã«ã‚ˆã‚Šã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ã®ãƒªã‚¹ãƒˆã‚’確èªã—ã¾ã™ã€‚ + +5.2. CR ã¯ã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ã®ãƒªã‚¹ãƒˆã‚’ CRQueryResult トピックã«å…¬é–‹ã—ã¾ã™ã€‚ã“ã®ãƒˆãƒ”ックã§ã¯ã€SM ãŒã™ã§ã«ãƒªãƒƒã‚¹ãƒ³ã‚’ +é–‹å§‹ã—ã€æ‰‹é †4.2.3ã¨4.2.4を繰り返ã—ã¾ã™ã€‚ +    +**注æ„**: Csource トピックã«ã¯ã€Csource レジストレーションインターフェイスを介ã—ã¦ç›´æŽ¥ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã•れ㟠+コンテキストソースã®ãƒªã‚¹ãƒˆãŒå«ã¾ã‚Œã¾ã™ã€‚CR トピックã«ã¯ã€(IoT broker インターフェイスを介ã—ã¦) エンティティ作æˆè¦æ±‚ +ãŠã‚ˆã³/ã¾ãŸã¯ãã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãƒ¢ãƒ‡ãƒ«ã®ãƒ—ロãƒã‚¤ãƒ€ãƒ¼/データソースã«åŸºã¥ã„ã¦ä½œæˆã•れãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãƒ‡ãƒ¼ã‚¿ãƒ¢ãƒ‡ãƒ« +(エンティティ ID ã¨ã—ã¦ç¶­æŒ) ã®ãƒžãƒƒãƒ—ãŒå«ã¾ã‚Œã¾ã™ã€‚制é™äº‹é …: Scorpio Broker ã®æœ€åˆã®ãƒªãƒªãƒ¼ã‚¹ã§ã¯ã€Csource クエリ㯠+サãƒãƒ¼ãƒˆã•れã¦ã„ã¾ã›ã‚“。代ã‚りã«ã€Csource クエリã¯å†…部メッセージングキューメカニズムã«åŸºã¥ã„ã¦ã„ã¾ã™ã€‚å°†æ¥çš„ã«ã¯ã€ +メッセージキュー㨠REST ベース㮠Csource クエリã®ä¸¡æ–¹ãŒã‚µãƒãƒ¼ãƒˆã•れる予定ã§ã™ã€‚ + +クエリー +######## + +.. figure:: ../../en/source/figures/flow-3.png + +ã“ã®å›³ã¯ã€Scorpio Broker システムã§ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚µãƒ–スクリプションã®é‹ç”¨ãƒ•ローを示ã—ã¦ã„ã¾ã™ã€‚マークã•れãŸã‚¹ãƒ†ãƒƒãƒ—ã® +è§£é‡ˆã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™ã€‚ + +1. アプリケーションã¯ã€NGSI-LD 準拠ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェース (Service API gateway ã«ã‚ˆã£ã¦å…¬é–‹ã•れる) を呼ã³å‡ºã—ã¦ã€HTTP GET + è¦æ±‚ã®å½¢å¼ã§ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£/エンティティ/属性をクエリã—ã¾ã™ã€‚ + +2. リクエスト㯠Service API gateway ã«å…¥ã‚Šã¾ã™ã€‚ + +2.1. Service API gateway ã¯ã€Discovery & registry service ã‹ã‚‰ã€å®Ÿéš›ã«ã‚µãƒ¼ãƒ“スをæä¾›ã—ã¦ã„るマイクロサービス +エンドãƒã‚¤ãƒ³ãƒˆ (ç€ä¿¡è¦æ±‚を転é€ã™ã‚‹å¿…è¦ãŒã‚る場所) を検出ã—ã¾ã™ã€‚ + +2.2. Service API gateway ã¯ã€HTTP リクエストを Query Manager マイクロサービスã«è»¢é€ã—ã¾ã™ã€‚ + +3. クエリマãƒãƒ¼ã‚¸ãƒ£ã¯ã€ãƒˆãƒ”ック “Entities†ã‹ã‚‰ä»¥å‰ã«ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿/エンティティをフェッãƒã™ã‚‹ã‚ˆã†ã«ãªã‚Šã¾ã—ãŸã€‚ + +- クエリãŒã™ã¹ã¦ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã«å¯¾ã™ã‚‹ã‚‚ã®ã§ã‚ã‚‹ã‹ã€ID や属性をæŒã¤ç‰¹å®šã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãŒè¦æ±‚ã•れãŸå ´åˆã€ã“れã¯ã€ + ストレージマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã‚’関与ã•ã›ã‚‹ã“ã¨ãªãã€ã‚¯ã‚¨ãƒªãƒžãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã«ã‚ˆã£ã¦ Kafka エンティティトピックデータã«åŸºã¥ã„㦠+ 直接æä¾›ã•れã¾ã™ã€‚ã¤ã¾ã‚Šã€geo 以外ã®ã‚¯ã‚¨ãƒªã‚„ã€1ã¤ã¾ãŸã¯è¤‡æ•°ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã«é–¢é€£ä»˜ã‘ã‚‰ã‚ŒãŸæ­£è¦è¡¨ç¾ã‚¯ã‚¨ãƒªã®ãªã„ + å˜ç´”ãªã‚¯ã‚¨ãƒªã‚’直接処ç†ã§ãã¾ã™ã€‚ã“ã®å ´åˆã€å¿œç­”ãŒè¿”é€ã•れã€å‡¦ç†ã¯ã‚¹ãƒ†ãƒƒãƒ—7.2 ã«ã‚¸ãƒ£ãƒ³ãƒ—ã—ã¾ã™ã€‚ + +- 複雑ãªã‚¯ã‚¨ãƒªã®å ´åˆã€ã‚¯ã‚¨ãƒªãƒžãƒãƒ¼ã‚¸ãƒ£ã¯ã€æ¬¡ã®æ‰‹é †ã§èª¬æ˜Žã™ã‚‹ã‚ˆã†ã«ã€ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ãƒžãƒãƒ¼ã‚¸ãƒ£ã®æ”¯æ´ã‚’å—ã‘ã¾ã™ã€‚ + +4. クエリマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ (複雑ãªã‚¯ã‚¨ãƒªã®å ´åˆ) ã¯ã€ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ãƒžãƒãƒ¼ã‚¸ãƒ£ãƒ¼ãŒãƒªãƒƒã‚¹ãƒ³ã—ã¦ã„るクエリトピックã«ã‚¯ã‚¨ãƒª + (メッセージやãã®ä»–ã®ãƒ¡ã‚¿ãƒ‡ãƒ¼ã‚¿ã§ä½¿ç”¨ã•れるもã®ã‚’埋ã‚込む) を公開ã—ã¾ã™ã€‚ + +5. ストレージマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€è¦æ±‚ã•れãŸã‚¯ã‚¨ãƒªã®ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ã‘å–りã€DB データã«å¯¾ã™ã‚‹ã‚¯ã‚¨ãƒªã®å‡¦ç†ã‚’é–‹å§‹ã—〠+ クエリ応答を作æˆã—ã¾ã™ã€‚ + +6. ストレージマãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€ã‚¯ã‚¨ãƒªãƒžãƒãƒ¼ã‚¸ãƒ£ãƒ¼ãŒãƒªãƒƒã‚¹ãƒ³ã—ã¦ã„るクエリトピックã§ã‚¯ã‚¨ãƒªã®å¿œç­”をパブリッシュã—ã¾ã™ã€‚ + +7. QM ã¯ã€QueryResult トピックã‹ã‚‰ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ã‘å–りã¾ã™ã€‚ + +7.1. HTTP 応答を API gateway ã«é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +7.2. API gateway ã¯ã€ã‚¨ãƒ³ãƒ‰ãƒ¦ãƒ¼ã‚¶ãƒ¼/リクエスターã«å¿œç­”ã‚’é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +コンテキストソース登録 +###################### + +.. figure:: ../../en/source/figures/flow-4.png + +ã“ã®å›³ã¯ã€ScorpioBroker システムã§ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã®æ“作フローを示ã—ã¦ã„ã¾ã™ã€‚マークã•れ㟠+ステップã®è§£é‡ˆã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™: + +1. アプリケーションã¯ã€HTTP POST リクエストã®å½¢å¼ã§ (Service API gateway ã«ã‚ˆã£ã¦å…¬é–‹ã•れる) NGSI-LD 準拠㮠+ インターフェイスã‹ã‚‰ Csource registration を呼ã³å‡ºã—ã¾ã™ã€‚ + +2. リクエスト㯠Service API gateway ã«å…¥ã‚Šã¾ã™ã€‚ + +a. 
Service API gateway ã¯ã€Discovery & registry service ã‹ã‚‰ã€å®Ÿéš›ã«ã‚µãƒ¼ãƒ“スをæä¾›ã—ã¦ã„るマイクロサービス +エンドãƒã‚¤ãƒ³ãƒˆ (ç€ä¿¡è¦æ±‚を転é€ã™ã‚‹å¿…è¦ãŒã‚る場所) を検出ã—ã¾ã™ã€‚ + +b. Service API gateway ã¯ã€HTTP リクエストを Context Registry (CR) Manager マイクロサービスã«è»¢é€ã—ã¾ã™ã€‚ + +3. CR manager ã¯ã€ä»¥å‰ã«ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿/エンティティをトピック "CSource" ã‹ã‚‰ãƒ•ェッãƒã™ã‚‹ã‚ˆã†ã«ãªã‚Šã¾ã—ãŸã€‚ + +a. リクエスト Csource ã®ã‚¨ãƒ³ãƒˆãƒªãŒã™ã§ã«å­˜åœ¨ã™ã‚‹å ´åˆã€å‡¦ç†ã‚’終了ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã—ã¾ã™ã€‚ +存在ã—ãªã„å ´åˆã¯ã€ã•らã«å‡¦ç†ã‚’続行ã—ã¾ã™ã€‚ + +b. ã“れã§ã€CR manager ã¯ã„ãã¤ã‹ã®åŸºæœ¬çš„ãªæ¤œè¨¼ã‚’実行ã—ã¦ã€ã“ã‚ŒãŒæœ‰åйãªãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’æŒã¤æœ‰åйãªè¦æ±‚ã§ã‚ã‚‹ã‹ã©ã†ã‹ã‚’ +確èªã—ã¾ã™ã€‚ + +c. CR manager ã¯ã€ã“ã®ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’ Csource ãƒˆãƒ”ãƒƒã‚¯ã«æ›¸ã込むよã†ã«ãªã‚Šã¾ã—ãŸã€‚ + +4. Storage Manager ã¯ã€Csource トピックをリッスンã—ç¶šã‘ã€æ–°ã—ã„ã‚¨ãƒ³ãƒˆãƒªã®æ›¸ãè¾¼ã¿ãŒã‚ã‚‹å ´åˆã¯ã€ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ã§ç›¸å¯¾æ“作を +実行ã—ã¾ã™ã€‚ + +5. CR manager ã¯ã€Csource è¦æ±‚ã«å¯¾ã™ã‚‹å¿œç­”を準備ã—ã¾ã™ã€‚ + +5.1. HTTP 応答を API gateway ã«é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +5.2. API gateway ã¯ã€å¿œç­”をエンドユーザー/リクエスターã«é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +**注æ„**: Conext Source Update リクエストã®å ´åˆã€ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã®ã¿ãŒå¤‰æ›´ã‚’å–å¾—ã—ã€ã‚¹ãƒ†ãƒƒãƒ—3 ã§æ—¢å­˜ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã®æ¤œè¨¼æ™‚ã« +終了ã›ãšã€å–å¾—ã—ãŸã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’æ›´æ–°ã—㦠Kafka ã«æ›¸ã戻ã—ã¾ã™ã€‚残りã®ãƒ•ローã¯ã»ã¨ã‚“ã©åŒã˜ã¾ã¾ã§ã™ã€‚ + +コンテキストソースã®ã‚µãƒ–スクリプション +###################################### + +.. figure:: ../../en/source/figures/flow-5.png + +図㮠ScorpioBroker コンテキストソースサブスクリプションフローã¯ã€ScorpioBroker システムã§ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ +サブスクリプションã®é‹ç”¨ãƒ•ローを示ã—ã¦ã„ã¾ã™ã€‚マークã•れãŸã‚¹ãƒ†ãƒƒãƒ—ã®è§£é‡ˆã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™: + +1. アプリケーションã¯ã€HTTP POST リクエストã®å½¢å¼ã§ (Service API gateway ã«ã‚ˆã£ã¦å…¬é–‹ã•れる) NGSI-LD 準拠㮠+ インターフェイスã‹ã‚‰ã€Csource updates を呼ã³å‡ºã—ã¾ã™ã€‚ + +2. リクエスト㯠Service API gateway ã«å…¥ã‚Šã¾ã™ã€‚ + +a. Service API gateway ã¯ã€Discovery & registry serviceã‹ã‚‰ã€å®Ÿéš›ã«ã‚µãƒ¼ãƒ“スをæä¾›ã—ã¦ã„るマイクロサービス +エンドãƒã‚¤ãƒ³ãƒˆ (ç€ä¿¡è¦æ±‚を転é€ã™ã‚‹å¿…è¦ãŒã‚る場所)を検出ã—ã¾ã™ã€‚ + +b. Service API gateway ã¯ã€HTTP リクエストを Context Registry (CR) Manager マイクロサービスã«è»¢é€ã—ã¾ã™ã€‚ + +3. CR manager ã¯ã€ä»¥å‰ã«ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿/エンティティをトピック "CSourceSub" ã‹ã‚‰ãƒ•ェッãƒã™ã‚‹ã‚ˆã†ã«ãªã‚Šã¾ã—ãŸã€‚ + +a. ã“ã“ã§ã€CR manager ã¯ã„ãã¤ã‹ã®åŸºæœ¬çš„ãªæ¤œè¨¼ã‚’実行ã—ã¦ã€ã“ã‚ŒãŒæœ‰åйãªãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’æŒã¤æœ‰åйãªè¦æ±‚ã§ã‚ã‚‹ã‹ã©ã†ã‹ã‚’ +確èªã—ã¾ã™ã€‚ + +b. リクエスト Csource subscription ã®ã‚¨ãƒ³ãƒˆãƒªãŒã™ã§ã«å­˜åœ¨ã™ã‚‹å ´åˆã€å‡¦ç†ã‚’終了ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーション +ã—ã¾ã™ã€‚存在ã—ãªã„å ´åˆã¯ã€ã•らã«å‡¦ç†ã‚’続行ã—ã¾ã™ã€‚ + +c. CR manager ã¯ã€ã“ã®ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã‚’ CsourceSub ãƒˆãƒ”ãƒƒã‚¯ã«æ›¸ãè¾¼ã¿ã¾ã™ã€‚ + +d. 並行ã—ã¦ã€è¦æ±‚ã•れãŸã‚µãƒ–スクリプションã«ã¤ã„㦠Csource トピックをリッスンã™ã‚‹ç‹¬ç«‹ã—ãŸã‚¹ãƒ¬ãƒƒãƒ‰ã‚‚é–‹å§‹ã—ã€æ­£å¸¸ãªçŠ¶æ…‹ã« +ãªã‚‹ã¨ã€ã‚µãƒ–ã‚¹ã‚¯ãƒªãƒ—ã‚·ãƒ§ãƒ³ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã§æä¾›ã•れる登録済ã¿ã‚¨ãƒ³ãƒ‰ãƒã‚¤ãƒ³ãƒˆã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションãŒé€ä¿¡ã•れã¾ã™ã€‚ + +4. Storage Manager ã¯ã€CsourceSub トピックをリッスンã—ç¶šã‘ã€æ–°è¦/æ›´æ–°ã•れãŸã‚¨ãƒ³ãƒˆãƒªã®æ›¸ãè¾¼ã¿ã«ã¤ã„ã¦ã¯ã€ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ã§ + 相対æ“作を実行ã—ã¾ã™ã€‚ + +5. CR manager ã¯ã€Csource subscription è¦æ±‚ã®å¿œç­”を準備ã—〠+ +5.1. HTTP 応答をAPI gateway ã«é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +5.2. API gateway ã¯ã€å¿œç­”をエンドユーザー/リクエスターã«é€ã‚Šè¿”ã—ã¾ã™ã€‚ + +ヒストリー (履歴) +################# + +.. figure:: ../../en/source/figures/flow-6.png + +ã“ã®å›³ã¯ã€ScorpioBroker システムã§ã®ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚µãƒ–スクリプションã®é‹ç”¨ãƒ•ローを示ã—ã¦ã„ã¾ã™ã€‚マークã•れãŸã‚¹ãƒ†ãƒƒãƒ—ã® +è§£é‡ˆã¯æ¬¡ã®ã¨ãŠã‚Šã§ã™: + +1. 
アプリケーションã¯ã€HTTP POST リクエストã®å½¢å¼ã§ã€(Service API gateway ã«ã‚ˆã£ã¦å…¬é–‹ã•れる) NGSI-LD 準拠㮠+ インターフェイスã‹ã‚‰ History manager を呼ã³å‡ºã—ã¾ã™ã€‚ + +2. リクエスト㯠Service API gateway ã«å…¥ã‚Šã¾ã™ã€‚ + +a. Service API gateway ã¯ã€Discovery & registry service ã‹ã‚‰ã€å®Ÿéš›ã«ã‚µãƒ¼ãƒ“スをæä¾›ã—ã¦ã„るマイクロサービス +エンドãƒã‚¤ãƒ³ãƒˆ (ç€ä¿¡è¦æ±‚を転é€ã™ã‚‹å¿…è¦ãŒã‚る場所) を検出ã—ã¾ã™ã€‚ + +b. Service API gateway ã¯ã€HTTP リクエストを History manager マイクロサービスã«è»¢é€ã—ã¾ã™ã€‚ + +3. History manager ã¯ã€å—ä¿¡ã—ãŸãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã«å¯¾ã—㦠EVA アルゴリズムアプローãƒã‚’実行ã—ã€ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰å±žæ€§ã‚’ Kafka トピック +“TEMPORALENTITY†ã«ãƒ—ッシュã—ã¾ã™ã€‚ + +**注æ„**: History Manager ã¯ã€ã‚ªãƒ–ジェクトã®ãƒ«ãƒ¼ãƒˆãƒ¬ãƒ™ãƒ«ã§å„属性をウォークスルーã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ +(@id 㨠@type を除ã)。å„属性内ã§ã€å„インスタンス (é…列è¦ç´ ) をウォークスルーã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚次ã«ã€ç¾åœ¨ã® +オブジェクトを Kafka トピック TEMPORALENTITY ã«é€ä¿¡ã—ã¾ã™ã€‚ + +4. History Manager ã¯ã€"TEMPORALENTITY" ãƒˆãƒ”ãƒƒã‚¯ã¨æ–°ã—ã„エントリをリッスンã—ç¶šã‘ã€ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ã§ç›¸å¯¾æ“作を実行ã—ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/config.rst b/scorpio-broker/docs/ja/source/config.rst new file mode 100644 index 0000000000000000000000000000000000000000..0be52ab5e6f1fc2a8e455b95a82d83759371045f --- /dev/null +++ b/scorpio-broker/docs/ja/source/config.rst @@ -0,0 +1,176 @@ +************************ +Scorpio ã®æ§‹æˆãƒ‘ラメータ +************************ + +ã“ã®ã‚»ã‚¯ã‚·ãƒ§ãƒ³ã§ã¯ã€Scorpio broker ã«å¿…è¦ãªã™ã¹ã¦ã®åŸºæœ¬æ§‹æˆã«ã¤ã„ã¦èª¬æ˜Žã—ã¾ã™ã€‚ã“れã¯ã€Scorpio ã®ã•ã¾ã–ã¾ãª +マイクロサービスã®åŸºæœ¬çš„ãªãƒ†ãƒ³ãƒ—レートã¨ã—ã¦ä½¿ç”¨ã§ãã¾ã™ã€‚ + +ã•ã¾ã–ã¾ãªæ§‹æˆãƒ‘ラメータã®èª¬æ˜Ž +############################## + +1. **server**:- ã“ã“ã«ã¯ã€ãƒ¦ãƒ¼ã‚¶ãƒ¼ã¯ã€ **ãƒãƒ¼ãƒˆ** ã‚„ Tomcat サーãƒãƒ¼ã® **ã‚¹ãƒ¬ãƒƒãƒ‰ã®æœ€å¤§æ•°** ãªã©ã®ã•ã¾ã–ã¾ãªã‚µãƒ¼ãƒãƒ¼é–¢é€£ +パラメーターを定義ã§ãã¾ã™ã€‚ã“れã¯ã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス通信ã«é–¢é€£ã—ã¦ã„ã¾ã™ã€‚å¤‰æ›´ã«æ³¨æ„ã—ã¦ãã ã•ã„。 + +.. code-block:: JSON + + server: + port: XXXX + tomcat: + max: + threads: XX + +2. **Entity Topics**:- ã“れらã¯ã€Kafka ã§ã® Scorpio ã®å†…部コミュニケーションã«ä½¿ç”¨ã•れるトピックã§ã™ã€‚ +ã“れを変更ã™ã‚‹å ´åˆã¯ã€ã‚½ãƒ¼ã‚¹ã‚³ãƒ¼ãƒ‰ã®å†…容も変更ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +.. code-block:: JSON + + entity: + topic: XYZ + create: + topic: XYZ + append: + topic: XYZ + update: + topic: XYZ + delete: + topic: XYZ + index: + topic: XYZ + +3. **batchoperations**:- NGSI-LD æ“作ã«ã‚ˆã£ã¦å®šç¾©ã•れãŸãƒãƒƒãƒæ“作ã®åˆ¶é™ã‚’定義ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã¾ã™ã€‚ +ã“れ㯠HTTP サーãƒãƒ¼ã®æ§‹æˆã¨ãƒãƒ¼ãƒ‰ã‚¦ã‚§ã‚¢ã«é–¢é€£ã—ã¦ã„ã¾ã™ã€‚注æ„ã—ã¦å¤‰æ›´ã—ã¦ãã ã•ã„。 + +.. code-block:: JSON + + batchoperations: + maxnumber: + create: XXXX + update: XXXX + upsert: XXXX + delete: XXXX + +4. **bootstrap**:- Kafka broker ã® URL を定義ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã¾ã™ã€‚Kafka ã®è¨­å®šã‚’変更ã—ãŸå ´åˆã«ã®ã¿å¤‰æ›´ã—ã¦ãã ã•ã„。 + +.. code-block:: JSON + + bootstrap: + servers: URL + +5. **Csources Topics**:- ã“れらã¯ã€Kafka ã§ã® Scorpio ã®å†…部コミュニケーションã«ä½¿ç”¨ã•れるトピックã§ã™ã€‚ã“れを変更ã™ã‚‹ +å ´åˆã¯ã€ã‚½ãƒ¼ã‚¹ã‚³ãƒ¼ãƒ‰ã®å†…容も変更ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +.. code-block:: JSON + + registration: + topic: CONTEXT_REGISTRY + +6. **append**:- エンティティ㮠append overwrite オプションを定義ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã¾ã™ã€‚ç´°å¿ƒã®æ³¨æ„を払ã£ã¦äº¤æ›ã—㦠+ãã ã•ã„。 + +.. code-block:: JSON + + append: + overwrite: noOverwrite + + +7. **spring**:- サービスåãªã©ã®ãƒ—ロジェクトã®åŸºæœ¬çš„ãªè©³ç´°ã‚’定義ã™ã‚‹ãŸã‚ã€ãŠã‚ˆã³ Kafka, flyway, データソースã€ã‚¯ãƒ©ã‚¦ãƒ‰ã® +æ§‹æˆã®è©³ç´°ã‚’æä¾›ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã¾ã™ã€‚何をã—ã¦ã„ã‚‹ã®ã‹ã‚ã‹ã‚‰ãªã„é™ã‚Šã€ã“れらを変更ã—ãªã„ã§ãã ã•ã„ï¼ + +.. 
code-block:: JSON + + spring: + application: + name: serviceName + main: + lazy-initialization: true + kafka: + admin: + properties: + cleanup: + policy: compact + flyway: + baselineOnMigrate: true + cloud: + stream: + kafka: + binder: + brokers: localhost:9092 + bindings: + ATCONTEXT_WRITE_CHANNEL: + destination: ATCONTEXT + contentType: application/json + datasource: + url: "jdbc:postgresql://127.0.0.1:5432/ngb?ApplicationName=ngb_querymanager" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP + maxLifetime: 2000000 + connectionTimeout: 30000 + + +8. **query Topics**:- ã“れらã¯ã€Kafka ã§ã® Scorpio ã®å†…部コミュニケーションã«ä½¿ç”¨ã•れるトピックã§ã™ã€‚ã“れを変更ã™ã‚‹ +å ´åˆã¯ã€ã‚½ãƒ¼ã‚¹ã‚³ãƒ¼ãƒ‰ã®å†…容も変更ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +.. code-block:: JSON + + query: + topic: QUERY + result: + topic: QUERY_RESULT + +9. **atcontext**:- æ··åˆã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãŒãƒ˜ãƒƒãƒ€ãƒ¼ã‚’介ã—ã¦æä¾›ã•れるシナリオã§ã€Scorpio ã«ã‚ˆã£ã¦æä¾›ã•れるコンテキスト㮠+URL を定義ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã¾ã™ã€‚ + +.. code-block:: JSON + + atcontext: + url: http://:/ngsi-ld/contextes/ + +10. **Key**:- 逆シリアル化用ã®ãƒ•ァイルを定義ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã¾ã™ã€‚変更ã—ãªã„ã§ãã ã•ã„ï¼ + +.. code-block:: JSON + + key: + deserializer: org.apache.kafka.common.serialization.StringDeserializer + +11. **reader**:- データベースを Scorpio broker ã«æ§‹æˆã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã€ã™ã¹ã¦ã®èª­ã¿å–りæ“作を実行ã™ã‚‹ãŸã‚ã«å¿…è¦ã§ã™ã€‚ +ã“ã®ä¾‹ã¯ã€ãƒ­ãƒ¼ã‚«ãƒ«ã«ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã•れ㟠PostgresDB ã®ãƒ‡ãƒ•ォルト設定ã«åŸºã¥ã„ã¦ã„ã¾ã™ã€‚ + +.. code-block:: JSON + + reader: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_reader" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Reader + maxLifetime: 2000000 + connectionTimeout: 30000 + +12. **writer**:- データベースを Scorpio broker ã«æ§‹æˆã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã•れã€ã™ã¹ã¦ã®æ›¸ãè¾¼ã¿æ“作を実行ã™ã‚‹ãŸã‚ã«å¿…è¦ã§ã™ã€‚ +ã“ã®ä¾‹ã¯ã€ãƒ­ãƒ¼ã‚«ãƒ«ã«ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã•れ㟠PostgresDB ã®ãƒ‡ãƒ•ォルト構æˆã«åŸºã¥ã„ã¦ã„ã¾ã™ã€‚ + +.. code-block:: JSON + + writer: + enabled: true + datasource: + url: "jdbc:postgresql://localhost:5432/ngb?ApplicationName=ngb_storagemanager_writer" + username: ngb + password: ngb + hikari: + minimumIdle: 5 + maximumPoolSize: 20 + idleTimeout: 30000 + poolName: SpringBootHikariCP_Writer + maxLifetime: 2000000 + connectionTimeout: 30000 diff --git a/scorpio-broker/docs/ja/source/contributionGuideline.rst b/scorpio-broker/docs/ja/source/contributionGuideline.rst new file mode 100644 index 0000000000000000000000000000000000000000..9dcf503df2b44c81e248a0d2a6df4b0c4b80d1f7 --- /dev/null +++ b/scorpio-broker/docs/ja/source/contributionGuideline.rst @@ -0,0 +1,49 @@ +******************************** +コントリビューションガイドライン +******************************** + +ブランãƒç®¡ç†ã‚¬ã‚¤ãƒ‰ãƒ©ã‚¤ãƒ³ +######################## + +.. figure:: ../../en/source/figures/gitGuideline.jpg + +コミュニティã¯ã€ç„¡é™ã®å­˜ç¶šæœŸé–“ã‚’æŒã¤2ã¤ã®ä¸»è¦ãªãƒ–ランãƒã‚’æŒã¤ã“ã¨ãŒã§ãã¾ã™: + +1. **Master branch**: ã“れã¯éžå¸¸ã«å®‰å®šã—ãŸãƒ–ランãƒã§ã‚りã€å¸¸ã«æœ¬ç•ªç’°å¢ƒã«å¯¾å¿œã—ã¦ãŠã‚Šã€æœ¬ç•ªç’°å¢ƒã§ã®ã‚½ãƒ¼ã‚¹ã‚³ãƒ¼ãƒ‰ã® + 最新リリースãƒãƒ¼ã‚¸ãƒ§ãƒ³ãŒå«ã¾ã‚Œã¦ã„ã¾ã™ã€‚ +2. 
**Development branch**: マスターブランãƒã‹ã‚‰æ´¾ç”Ÿã—ãŸé–‹ç™ºãƒ–ランãƒã¯ã€æ¬¡ã®ãƒªãƒªãƒ¼ã‚¹ã§è¨ˆç”»ã•れã¦ã„ã‚‹ã•ã¾ã–ã¾ãªæ©Ÿèƒ½ã‚’ + çµ±åˆã™ã‚‹ãŸã‚ã®ãƒ–ランãƒã¨ã—ã¦æ©Ÿèƒ½ã—ã¾ã™ã€‚ã“ã®ãƒ–ランãƒã¯ã€ãƒžã‚¹ã‚¿ãƒ¼ãƒ–ランãƒã»ã©å®‰å®šã—ã¦ã„ã‚‹å ´åˆã¨ãã†ã§ãªã„å ´åˆãŒ + ã‚りã¾ã™ã€‚ã“れã¯ã€é–‹ç™ºè€…ãŒã‚³ãƒ©ãƒœãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã—ã¦æ©Ÿèƒ½ãƒ–ランãƒã‚’マージã™ã‚‹å ´æ‰€ã§ã™ã€‚ã™ã¹ã¦ã®å¤‰æ›´ã¯ã€ä½•らã‹ã®æ–¹æ³•ã§ + マスターã«ãƒžãƒ¼ã‚¸ã—ã¦æˆ»ã—ã€ãƒªãƒªãƒ¼ã‚¹ç•ªå·ã§ã‚¿ã‚°ä»˜ã‘ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +ã“れらã®2ã¤ã®ä¸»è¦ãªãƒ–ランãƒã¨ã¯åˆ¥ã«ã€ãƒ¯ãƒ¼ã‚¯ãƒ•ローã«ã¯ä»–ã®ãƒ–ランãƒãŒã‚りã¾ã™: + +- **Feature Branch**: 機能開発ã€ã¤ã¾ã‚Šæ‹¡å¼µã¾ãŸã¯ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆåŒ–ã®ãŸã‚ã«é–‹ç™ºãƒ–ランãƒã‹ã‚‰åˆ†å²ã—ã¾ã™ã€‚機能開発ã¾ãŸã¯ + 拡張機能ã®å®Ÿè£…後ã€é–‹ç™ºãƒ–ランãƒã«ãƒžãƒ¼ã‚¸ã•れã¾ã—ãŸã€‚ +- **Bug Branch**: 開発ブランãƒã‹ã‚‰åˆ†å²ã—ã¾ã™ã€‚ãƒã‚°ä¿®æ­£å¾Œã€é–‹ç™ºãƒ–ランãƒã«ãƒžãƒ¼ã‚¸ã•れã¾ã—ãŸã€‚ +- **Hotfix branch**: 修正プログラムブランãƒã¯ã€ãƒžã‚¹ã‚¿ãƒ¼ãƒ–ランãƒã‹ã‚‰ä½œæˆã•れã¾ã™ã€‚ã“れã¯ç¾åœ¨ã®è£½å“リリースã§ã‚り〠+ ライブã§å®Ÿè¡Œã•れã¦ãŠã‚Šã€é‡å¤§ãªãƒã‚°ãŒåŽŸå› ã§å•題ãŒç™ºç”Ÿã—ã¦ã„ã¾ã™ã€‚ã—ã‹ã—ã€é–‹ç™ºã®å¤‰åŒ–ã¯ã¾ã ä¸å®‰å®šã§ã™ã€‚ãã®å¾Œã€ + ホットフィックスブランãƒã‹ã‚‰åˆ†å²ã—ã¦ã€å•題ã®ä¿®æ­£ã‚’é–‹å§‹ã™ã‚‹å ´åˆãŒã‚りã¾ã™ã€‚é‡å¤§ãªãƒã‚°ã®ã¿ã®å ´åˆã€ã“ã‚Œã¯æœ€ã‚‚ã¾ã‚Œãª + 機会ã§ã‚ã‚‹ã¯ãšã§ã™ã€‚ + +**注æ„**: 修正プログラムブランãƒã‚’作æˆãŠã‚ˆã³ãƒžãƒ¼ã‚¸ã™ã‚‹æ¨©é™ã‚’æŒã£ã¦ã„ã‚‹ã®ã¯ã€NLE ãŠã‚ˆã³ NECTI メンãƒãƒ¼ã®ã¿ã§ã™ã€‚ + ++------------------+-------------------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------+ +| ブランム| ブランãƒå‘½åガイドライン | 備考 | ++==================+=====================================================================================================================================+=====================================================================================================+ +| Feature branches | *development* ã‹ã‚‰åˆ†å²ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚*development* ã«ãƒžãƒ¼ã‚¸ã—ã¦æˆ»ã™å¿…è¦ãŒã‚りã¾ã™ã€‚ブランãƒã®å‘½åè¦å‰‡: *feature-feature_id* | *feature_id* ã¯ã€**https://github.com/ScorpioBroker/ScorpioBroker/issues** ã® Github Issue ID ã§ã™ã€‚| ++------------------+-------------------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------+ +| Bug Branches | *development* ã‹ã‚‰åˆ†å²ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚*development* ã«ãƒžãƒ¼ã‚¸ã—ã¦æˆ»ã™å¿…è¦ãŒã‚りã¾ã™ã€‚ブランãƒã®å‘½åè¦å‰‡: *bug-bug_id* | *bug_id* ã¯ã€**https://github.com/ScorpioBroker/ScorpioBroker/issues** ã® Github Issue ID ã§ã™ã€‚ | ++------------------+-------------------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------+ +| Hotfix Branches | *master branch* ã‹ã‚‰åˆ†å²ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚*master branch* ã«ãƒžãƒ¼ã‚¸ã—ã¦æˆ»ã™å¿…è¦ãŒã‚りã¾ã™ã€‚ブランãƒã®å‘½åè¦å‰‡: *hotfix-bug number* |*Bug number* ã¯ã€**https://github.com/ScorpioBroker/ScorpioBroker/issues** ã® Github Issue ID ã§ã™ã€‚ | ++------------------+-------------------------------------------------------------------------------------------------------------------------------------+-----------------------------------------------------------------------------------------------------+ + +ブランãƒã¸ã®ã‚¢ã‚¯ã‚»ã‚¹è¨±å¯ +************************ + +- **Master** - マスターブランãƒã§ãƒžãƒ¼ã‚¸ã—ã¦ãƒ—ルリクエストをå—ã‘入れるã“ã¨ãŒã§ãã‚‹ã®ã¯ã€NEC Laboratories Europe (NLE) + 
メンãƒãƒ¼ã¨ NEC Technologies India (NECTI) ã®ç‰¹æ¨©ãƒ¡ãƒ³ãƒãƒ¼ã®ã¿ã§ã‚ã‚‹ã¨ã„ã†éžå¸¸ã«å޳ã—ã„傾å‘ãŒã‚りã¾ã™ã€‚マスターã¸ã® + プルリクエストã¯ã€NECTI ã¾ãŸã¯ NLE メンãƒãƒ¼ã®ã¿ãŒä½œæˆã§ãã¾ã™ã€‚ +- **Development** - コミュニティメンãƒãƒ¼ã¯èª°ã§ã‚‚プルリクエストを開発ブランãƒã«æå‡ºã§ãã¾ã™ãŒã€NLE ã¾ãŸã¯ NECTI + メンãƒãƒ¼ãŒãƒ¬ãƒ“ューã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚開発ブランãƒã®ã‚³ãƒŸãƒƒãƒˆã¯ã€NGSI-LD テストスイートã§è¨˜è¿°ã•れãŸã™ã¹ã¦ã®ãƒ†ã‚¹ãƒˆ + ã‚±ãƒ¼ã‚¹ãŒæ­£å¸¸ã«å®Ÿè¡Œã•れãŸå ´åˆã«ã®ã¿ã€ãƒžã‚¹ã‚¿ãƒ¼ãƒ–ランãƒã«ç§»å‹•ã•れã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/docker.rst b/scorpio-broker/docs/ja/source/docker.rst new file mode 100644 index 0000000000000000000000000000000000000000..dec796feaa527411bf895d4d3a3fb1e4d2229473 --- /dev/null +++ b/scorpio-broker/docs/ja/source/docker.rst @@ -0,0 +1,101 @@ +********************* +Docker コンテナã®å–å¾— +********************* + +ç¾åœ¨ã® Maven ビルドã¯ã€Maven プロファイルを使用ã—ã¦ãƒ“ルドã‹ã‚‰ãƒˆãƒªã‚¬ãƒ¼ã™ã‚‹2種類㮠Docker コンテナー生æˆã‚’サãƒãƒ¼ãƒˆã—㦠+ã„ã¾ã™ã€‚ + +最åˆã®ãƒ—ロファイル㯠'docker' ã¨å‘¼ã°ã‚Œã€æ¬¡ã®ã‚ˆã†ã«å‘¼ã³å‡ºã™ã“ã¨ãŒã§ãã¾ã™: + +:: + + mvn clean package -DskipTests -Pdocker + +ã“れã«ã‚ˆã‚Šã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã”ã¨ã«å€‹åˆ¥ã® Docker コンテナãŒç”Ÿæˆã•れã¾ã™ã€‚対応ã™ã‚‹ docker-compose ファイル㯠+`docker-compose-dist.yml` ã§ã™ã€‚ + +2番目ã®ãƒ—ロファイル㯠'docker-aaio' ã¨å‘¼ã°ã‚Œã¾ã™ (ã»ã¼ã™ã¹ã¦ãŒ1ã¤ã«ãªã£ã¦ã„ã¾ã™)。ã“れã«ã‚ˆã‚Šã€Kafka メッセージãƒã‚¹ã¨ +Postgres データベースを除ãブローカーã®ã™ã¹ã¦ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã«å¯¾ã—ã¦å˜ä¸€ã® Docker コンテナーãŒç”Ÿæˆã•れã¾ã™ã€‚ + +aaio ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã‚’å–å¾—ã™ã‚‹ã«ã¯ã€æ¬¡ã®ã‚ˆã†ã« Maven ビルドを実行ã—ã¾ã™: + +:: + + mvn clean package -DskipTests -Pdocker-aaio + +対応ã™ã‚‹docker-compose ファイル㯠`docker-compose-aaio.yml` ã§ã™ã€‚ + +Kafkadocker イメージ㨠docker-compose ã«é–¢ã™ã‚‹ä¸€èˆ¬çš„ãªæ³¨æ„ +========================================================== + +Kafka Docker コンテナーã§ã¯ã€ç’°å¢ƒå¤‰æ•° `KAFKA_ADVERTISED_HOST_NAME` を指定ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ã“れã¯ã€docker-compose +ファイル㧠Docker ホスト IP ã¨ä¸€è‡´ã™ã‚‹ã‚ˆã†ã«å¤‰æ›´ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚`127.0.0.1` を使用ã§ãã¾ã™ãŒã€ã“れã«ã‚ˆã‚Šã€Kafka +をクラスターモードã§å®Ÿè¡Œã§ããªããªã‚Šã¾ã™ã€‚ + +詳細ã«ã¤ã„ã¦ã¯ã€https://hub.docker.com/r/wurstmeister/kafka ã‚’å‚ç…§ã—ã¦ãã ã•ã„。 + +Maven ã®å¤–部㧠Docker ビルドを実行 +================================== + +jar ã®ãƒ“ルドを Docker ビルドã‹ã‚‰åˆ†é›¢ã—ãŸã„å ´åˆã¯ã€ç‰¹å®šã® VARS ã‚’ Docker ã«æä¾›ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚次ã®ãƒªã‚¹ãƒˆã¯ã€ +ルートディレクトリã‹ã‚‰ docker build を実行ã—ãŸå ´åˆã®ã€ã™ã¹ã¦ã®å¤‰æ•°ã¨ãã®æ„図ã•れãŸå€¤ã‚’示ã—ã¦ã„ã¾ã™ã€‚ +  + - BUILD_DIR_ACS = Core/AtContextServer +  + - BUILD_DIR_SCS = SpringCloudModules/config-server +  + - BUILD_DIR_SES = SpringCloudModules/eureka +  + - BUILD_DIR_SGW = SpringCloudModules/gateway +  + - BUILD_DIR_HMG = History/HistoryManager +  + - BUILD_DIR_QMG = Core/QueryManager +  + - BUILD_DIR_RMG = Registry/RegistryManager +  + - BUILD_DIR_EMG = Core/EntityManager +  + - BUILD_DIR_STRMG = Storage/StorageManager +  + - BUILD_DIR_SUBMG = Core/SubscriptionManager + + - JAR_FILE_BUILD_ACS = AtContextServer-${project.version}.jar +  + - JAR_FILE_BUILD_SCS = config-server-${project.version}.jar +  + - JAR_FILE_BUILD_SES = eureka-server-${project.version}.jar +  + - JAR_FILE_BUILD_SGW = gateway-${project.version}.jar +  + - JAR_FILE_BUILD_HMG = HistoryManager-${project.version}.jar +  + - JAR_FILE_BUILD_QMG = QueryManager-${project.version}.jar +  + - JAR_FILE_BUILD_RMG = RegistryManager-${project.version}.jar +  + - JAR_FILE_BUILD_EMG = EntityManager-${project.version}.jar +  + - JAR_FILE_BUILD_STRMG = StorageManager-${project.version}.jar +  + - JAR_FILE_BUILD_SUBMG = SubscriptionManager-${project.version}.jar + + - JAR_FILE_RUN_ACS = AtContextServer.jar +  + - JAR_FILE_RUN_SCS = 
config-server.jar +  + - JAR_FILE_RUN_SES = eureka-server.jar +  + - JAR_FILE_RUN_SGW = gateway.jar +  + - JAR_FILE_RUN_HMG = HistoryManager.jar +  + - JAR_FILE_RUN_QMG = QueryManager.jar +  + - JAR_FILE_RUN_RMG = RegistryManager.jar +  + - JAR_FILE_RUN_EMG = EntityManager.jar +  + - JAR_FILE_RUN_STRMG = StorageManager.jar +  + - JAR_FILE_RUN_SUBMG = SubscriptionManager.jar diff --git a/scorpio-broker/docs/ja/source/errorHandling.rst b/scorpio-broker/docs/ja/source/errorHandling.rst new file mode 100644 index 0000000000000000000000000000000000000000..d329eb6a214bab901bc9c5add6ba559507758c14 --- /dev/null +++ b/scorpio-broker/docs/ja/source/errorHandling.rst @@ -0,0 +1,92 @@ +********************** +Scorpio ã§ã®ã‚¨ãƒ©ãƒ¼å‡¦ç† +********************** + +ã“ã®ã‚»ã‚¯ã‚·ãƒ§ãƒ³ã§ã¯ã€ScorpioBroker システムã®ã‚¨ãƒ©ãƒ¼å‡¦ç†ãƒ¡ã‚«ãƒ‹ã‚ºãƒ ã«é–¢ã™ã‚‹æƒ…報をæä¾›ã—ã¾ã™ã€‚ + +以下ã«ãƒªã‚¹ãƒˆã•れã¦ã„ã‚‹ã®ã¯ã€ã‚·ã‚¹ãƒ†ãƒ ã®ã‚¤ãƒ™ãƒ³ãƒˆã§ã™ã€‚ + +.. list-table:: **エラー処ç†** + :widths: 5 15 35 15 10 20 + :header-rows: 1 + + * - S.No. + - オペレーション/イベント + - シナリオã®èª¬æ˜Ž + - 関連モジュール + - エラーコード/レスãƒãƒ³ã‚¹ + - アクション +     + * - 1. + - InvalidRequest + - オペレーションã«é–¢é€£ä»˜ã‘られãŸãƒªã‚¯ã‚¨ã‚¹ãƒˆãŒæ§‹æ–‡çš„ã«ç„¡åйã§ã‚ã‚‹ã‹ã€é–“é•ã£ãŸã‚³ãƒ³ãƒ†ãƒ³ãƒ„ãŒå«ã¾ã‚Œã¦ã„ã¾ã™ + - REST Controller + - HTTP 400 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + + * - 2. + - BadRequestData + - リクエストã«ã¯ã€ã‚ªãƒšãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã®è¦ä»¶ã‚’満ãŸã•ãªã„入力データãŒå«ã¾ã‚Œã¦ã„ã¾ã™ + - REST Controller + - HTTP 400 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + + * - 3. + - AlreadyExists + - å‚ç…§ã•れãŸè¦ç´ ã¯ã™ã§ã«å­˜åœ¨ã—ã¾ã™ + - REST Controller + - HTTP 409 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + + * - 4. + - OperationNotSupported + - オペレーションã¯ã‚µãƒãƒ¼ãƒˆã•れã¦ã„ã¾ã›ã‚“ + - REST Controller + - HTTP 422 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + + * - 5. + - ResourceNotFound + - å‚ç…§ã•れãŸãƒªã‚½ãƒ¼ã‚¹ãŒè¦‹ã¤ã‹ã‚Šã¾ã›ã‚“ + - REST Controller + - HTTP 404 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + + * - 6. + - InternalError + - オペレーション実行中ã«ã‚¨ãƒ©ãƒ¼ãŒç™ºç”Ÿã—ã¾ã—㟠+ - REST Controller + - HTTP 500 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + + * - 7. + - Method Not Allowed + - クライアントãŒãƒªã‚½ãƒ¼ã‚¹ã«å¯¾ã—ã¦é–“é•ã£ãŸ HTTP 動詞を呼ã³å‡ºã™ã¨ã‚¨ãƒ©ãƒ¼ãŒç™ºç”Ÿã—ã¾ã—㟠+ - REST Controller + - HTTP 405 + - エラーをログã«è¨˜éŒ²ã—ã€ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã«é€šçŸ¥ã—ã¾ã™ + +エラーã¯ã€å®Ÿè£…ロジックã®å†…部ã§ã‚‚発生ã™ã‚‹å¯èƒ½æ€§ã®ã‚ã‚‹ã•ã¾ã–ã¾ãªä¾‹å¤–ã«ã¤ã„ã¦ã€æ¬¡ã®ã‚«ãƒ†ã‚´ãƒªã«åˆ†é¡žã™ã‚‹ã“ã¨ã‚‚ã§ãã‚‹ã“ã¨ã« +注æ„ã—ã¦ãã ã•ã„: + +1. 低ã„é‡è¦åº¦ (Low criticality) ã¨ã¯ã€ã‚½ãƒ•トウェアロジックã§å‡¦ç†ã™ã‚‹å¿…è¦ã®ã‚るエラーをå«ã¿ã€æ§‹æˆã®å•題ãŒåŽŸå› ã§ã‚り〠+リセットやシステムã®å†èµ·å‹•ãªã©ã‚’å¿…è¦ã¨ã—ãªã„ã‚‚ã®ã§ã™ã€‚ + +2. 中程度ã®é‡è¦åº¦ (Medium criticality) ã¯ã€ã‚½ãƒ•トウェアロジックã®å‡¦ç†ã®ãŸã‚ã«è©¦è¡Œã•れるもã®ã§ã™ãŒã€ã‚·ã‚¹ãƒ†ãƒ ãƒªã‚»ãƒƒãƒˆã€ +ãƒãƒƒãƒ—リセットãŒå¿…è¦ãªå ´åˆãŒã‚りã€ã‚·ã‚¹ãƒ†ãƒ ãŒå¤§å¹…ã«ä¸­æ–­ã•れる場åˆãŒã‚りã¾ã™ã€‚ + +3. 高ã„é‡è¦åº¦ (High criticality) ã¯ã€ç™ºç”Ÿã—ã¦ã¯ãªã‚‰ãªã„ãƒãƒ¼ãƒ‰ã‚¦ã‚§ã‚¢ãƒ™ãƒ¼ã‚¹ã®ã‚¨ãƒ©ãƒ¼ã§ã‚りã€ç™ºç”Ÿã—ãŸå ´åˆã¯ã‚·ã‚¹ãƒ†ãƒ ã® +リセットãŒå¿…è¦ã«ãªã‚‹å ´åˆãŒã‚りã¾ã™ã€‚ + +ã•ã¾ã–ã¾ãªã‚«ãƒ†ã‚´ãƒªã®ã‚¨ãƒ©ãƒ¼ã«å¯¾ã™ã‚‹ãƒ•ェイルセーフメカニズム: + +a. 低ã„é‡å¤§åº¦ (Low criticality) ã®ã‚¨ãƒ©ãƒ¼ã®å ´åˆã€ãƒ­ã‚®ãƒ³ã‚°ãŒå®Ÿè¡Œã•れã€å†è©¦è¡ŒãŒå®Ÿè¡Œã•れã€ãƒ­ãƒ¼ãƒ«ãƒãƒƒã‚¯ã¨ä¸Šä½å±¤ã¸ã®å¤±æ•—ã® +é€ä¿¡ã«ã‚ˆã£ã¦ã‚¨ãƒ©ãƒ¼ãŒå‡¦ç†ã•れã¾ã™ã€‚ + +b. 高ã„é‡å¤§åº¦ (High criticality) ã®ã‚¨ãƒ©ãƒ¼ã®å ´åˆã€ç·Šæ€¥ã‚¨ãƒ©ãƒ¼ãŒãƒ­ã‚°ã«è¨˜éŒ²ã•れ〠+å†èµ·å‹•ãŒæŽ¨å¥¨ã•れã¾ã™ã€‚ + +c. 
中程度ã®é‡å¤§åº¦ (Medium criticality) ã®ã‚¨ãƒ©ãƒ¼ã®å ´åˆã€å†è©¦è¡Œãƒ¡ã‚«ãƒ‹ã‚ºãƒ ãŒå®Ÿè£…ã•れã€ç·Šæ€¥ãƒ­ã‚°ãŒã‚·ã‚¹ãƒ†ãƒ ã«ã•らã«ãƒ­ã‚°ã« +記録ã•れã€ç®¡ç†è€…ã«å†èµ·å‹•ã™ã‚‹ã“ã¨ã‚’ãŠå‹§ã‚ã—ã¾ã™ã€‚ + +åˆæœŸåŒ–中ã®éšœå®³ã¯ç·Šæ€¥äº‹æ…‹ (emergency) ã¨ã—ã¦ãƒ­ã‚°ã«è¨˜éŒ²ã•れã€ã‚¨ãƒ©ãƒ¼ã¯å‘¼ã³å‡ºã—å…ƒã®ãƒ—ログラムã«è¿”ã•れã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/hardwareRequirement.rst b/scorpio-broker/docs/ja/source/hardwareRequirement.rst new file mode 100644 index 0000000000000000000000000000000000000000..97d2f241e5b3da4f8db657a73bc7c8e0c1ba05d9 --- /dev/null +++ b/scorpio-broker/docs/ja/source/hardwareRequirement.rst @@ -0,0 +1,73 @@ +************ +システムè¦ä»¶ +************ + +Java 8 ã®ã‚·ã‚¹ãƒ†ãƒ è¦ä»¶ +##################### + +**Windows** + +- Windows 10 (8u51 以é™) +- Windows 8.x (デスクトップ) +- Windows 7 SP1 +- Windows Vista SP2 +- Windows Server 2008 R2 SP1 (64-bit) +- Windows Server 2012 and 2012 R2 (64-bit) +- RAM: 128 MB +- ディスク容é‡: 124 MB for JRE; 2 MB for Java Update +- プロセッサー: æœ€å° Pentium 2 266 MHz processor +- ブラウザ: Internet Explorer 9 以é™, Firefox + +**Mac OS X** + +- Mac running Mac OS X 10.8.3+, 10.9+ を実行ã—ã¦ã„るインテルベース Mac +- インストールã®ç®¡ç†è€…æ¨©é™ +- 64ビットブラウザ +- Mac ã§ Oracle Java を実行ã™ã‚‹ã«ã¯ã€64ビットブラウザ (Safari ãªã©) ãŒå¿…è¦ã§ã™ + +**Linux** + +- Oracle Linux 5.5+1 +- Oracle Linux 6.x (32-bit), 6.x (64-bit)2 +- Oracle Linux 7.x (64-bit)2 (8u20 以é™) +- Red Hat Enterprise Linux 5.5+1, 6.x (32-bit), 6.x (64-bit)2 +- Red Hat Enterprise Linux 7.x (64-bit)2 (8u20 以é™) +- Suse Linux Enterprise Server 10 SP2+, 11.x +- Suse Linux Enterprise Server 12.x (64-bit)2 (8u31 以é™) +- Ubuntu Linux 12.04 LTS, 13.x +- Ubuntu Linux 14.x (8u25 以é™) +- Ubuntu Linux 15.04 (8u45 以é™) +- Ubuntu Linux 15.10 (8u65 以é™) +- ブラウザ: Firefox + + +ZooKeeper ã®è¦ä»¶ +################ + +ZooKeeper 㯠Java リリース 1.6 ä»¥é™ (JDK 6 ä»¥é™ ) ã§å®Ÿè¡Œã•れã¾ã™ã€‚ã“れã¯ã€ZooKeeper サーãƒãƒ¼ã®ã‚¢ãƒ³ã‚µãƒ³ãƒ–ルã¨ã—㦠+実行ã•れã¾ã™ã€‚3å°ã® ZooKeeper サーãƒãƒ¼ã¯ã€ã‚¢ãƒ³ã‚µãƒ³ãƒ–ãƒ«ã®æœ€å°æŽ¨å¥¨ã‚µã‚¤ã‚ºã§ã‚りã€åˆ¥ã€…ã®ãƒžã‚·ãƒ³ã§å®Ÿè¡Œã™ã‚‹ã“ã¨ã‚‚ãŠå‹§ã‚ã—ã¾ã™ã€‚ +Yahoo! 
ã§ã¯ã€ZooKeeper ã¯é€šå¸¸ã€ãƒ‡ãƒ¥ã‚¢ãƒ«ã‚³ã‚¢ãƒ—ロセッサã€2GB ã® RAMã€ãŠã‚ˆã³80GB ã® IDE ãƒãƒ¼ãƒ‰ãƒ‰ãƒ©ã‚¤ãƒ–ã‚’å‚™ãˆãŸå°‚用㮠+RHEL ボックスã«ãƒ‡ãƒ—ロイã•れã¾ã™ã€‚ + +Kafka ã«ã¤ã„ã¦æè¨€ +################## + +**Kafka brokers** ã¯ã€JVM ヒープ㨠OS ページキャッシュã®ä¸¡æ–¹ã‚’使用ã—ã¾ã™ã€‚JVM ヒープã¯ã€ãƒ–ローカー間ã®ãƒ‘ーティション㮠+レプリケーションã¨ãƒ­ã‚°ã®åœ§ç¸®ã«ä½¿ç”¨ã•れã¾ã™ã€‚レプリケーションã«ã¯ã€ãƒ–ローカーã®ãƒ‘ーティションã”ã¨ã«1MB (デフォルト㮠+replica.max.fetch.size) ãŒå¿…è¦ã§ã™ã€‚Apache Kafka 0.10.1 (Confluent Platform 3.1) ã§ã¯ã€ãƒ¬ãƒ—リケーションã«ä½¿ç”¨ã•れる +RAM ã®åˆè¨ˆã‚’10MBã«åˆ¶é™ã™ã‚‹æ–°ã—ã„æ§‹æˆ (replica.fetch.response.max.bytes) を追加ã—ã¦ã€ãƒ¡ãƒ¢ãƒªã¨ã‚¬ãƒ™ãƒ¼ã‚¸ã‚³ãƒ¬ã‚¯ã‚·ãƒ§ãƒ³ã®å•題を +回é¿ã—ã¾ã™ã€‚ブローカーã®ãƒ‘ーティションã¯é«˜ã„ã§ã™ã€‚ログã®åœ§ç¸®ã®å ´åˆã€å¿…è¦ãªãƒ¡ãƒ¢ãƒªã®è¨ˆç®—ã¯ã‚ˆã‚Šè¤‡é›‘ã§ã™ã€‚ã“ã®æ©Ÿèƒ½ã‚’使用 +ã—ã¦ã„ã‚‹å ´åˆã¯ã€Kafka ã®ãƒ‰ã‚­ãƒ¥ãƒ¡ãƒ³ãƒˆã‚’å‚ç…§ã™ã‚‹ã“ã¨ã‚’ãŠå‹§ã‚ã—ã¾ã™ã€‚å°è¦æ¨¡ã‹ã‚‰ä¸­è¦æ¨¡ã®å±•é–‹ã®å ´åˆã€é€šå¸¸ã¯4GBã®ãƒ’ープサイズ +ã§å分ã§ã™ã€‚ã•らã«ã€æ¶ˆè²»è€…ã¯å¸¸ã«ãƒ¡ãƒ¢ãƒªã‹ã‚‰èª­ã¿å–ã‚‹ã“ã¨ã‚’å¼·ããŠå‹§ã‚ã—ã¾ã™ã€‚ã•らã«ã€Consumer ã¯å¸¸ã«ãƒ¡ãƒ¢ãƒªã‹ã‚‰èª­ã¿å–ã‚‹ +ã“ã¨ã‚’å¼·ããŠå‹§ã‚ã—ã¾ã™ã€‚ã¤ã¾ã‚Šã€Kafka ã«æ›¸ãè¾¼ã¾ã‚Œã€OS ページキャッシュã«ä¿å­˜ã•れã¦ã„るデータã‹ã‚‰èª­ã¿å–ã‚‹ã“ã¨ã‚’å¼·ã +ãŠå‹§ã‚ã—ã¾ã™ã€‚ã“れã«å¿…è¦ãªãƒ¡ãƒ¢ãƒªã®é‡ã¯ã€ã“ã®ãƒ‡ãƒ¼ã‚¿ãŒæ›¸ãè¾¼ã¾ã‚Œã‚‹é€Ÿåº¦ã¨ã€æ¶ˆè²»è€…ãŒã©ã‚Œã ã‘é…れるã¨äºˆæƒ³ã•れるã‹ã«ã‚ˆã£ã¦ +ç•°ãªã‚Šã¾ã™ã€‚ブローカーã”ã¨ã«1時間ã‚ãŸã‚Š20GBを書ãè¾¼ã¿ã€é€šå¸¸ã®ã‚·ãƒŠãƒªã‚ªã§ãƒ–ローカーãŒ3時間é…れるã“ã¨ã‚’許å¯ã™ã‚‹å ´åˆã¯ã€ +OS ページキャッシュã«60GBを予約ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚Consumer ãŒãƒ‡ã‚£ã‚¹ã‚¯ã‹ã‚‰ã®èª­ã¿å–りを強制ã•れる場åˆã€ãƒ‘フォーマンス㯠+大幅ã«ä½Žä¸‹ã—ã¾ã™ + +**Kafka Connect** ãれ自体ã¯å¤šãã®ãƒ¡ãƒ¢ãƒªã‚’使用ã—ã¾ã›ã‚“ãŒã€ä¸€éƒ¨ã®ã‚³ãƒã‚¯ã‚¿ã¯åŠ¹çŽ‡ã®ãŸã‚ã«ãƒ‡ãƒ¼ã‚¿ã‚’内部ã§ãƒãƒƒãƒ•ァリング +ã—ã¾ã™ã€‚ãƒãƒƒãƒ•ァリングを使用ã™ã‚‹è¤‡æ•°ã®ã‚³ãƒã‚¯ã‚¿ã‚’実行ã™ã‚‹å ´åˆã¯ã€JVM ヒープサイズを1GB以上ã«å¢—ã‚„ã™å¿…è¦ãŒã‚りã¾ã™ã€‚ + +**Consumers** ã¯ã€Consumer ã”ã¨ã«å°‘ãªãã¨ã‚‚2MBを使用ã—ã€ãƒ–ローカーã‹ã‚‰ã®å¤§ããªå¿œç­”ã®å ´åˆã¯æœ€å¤§64MBを使用ã—ã¾ã™ (通常〠+ãƒãƒ¼ã‚¹ãƒˆãƒˆãƒ©ãƒ•ィックã®å ´åˆ)。プロデューサーã¯ãれãžã‚Œ64MBã®ãƒãƒƒãƒ•ァーをæŒã¡ã¾ã™ã€‚ ã¾ãšã€1GBã®RAMを割り当ã¦ã€Producer +ã”ã¨ã«64MBã€Consumer ã”ã¨ã«16MBを追加ã—ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/index.rst b/scorpio-broker/docs/ja/source/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..0b6d58466b04e9ad76514b2358166c2216d8c0c7 --- /dev/null +++ b/scorpio-broker/docs/ja/source/index.rst @@ -0,0 +1,73 @@ +============== +Scorpio Broker +============== + +Scorpio Broker ã¯ã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ—ロデューサーã¨ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ãŒç›¸äº’ã«å¯¾è©±ã§ãã‚‹ NGSI-LD API を実装ã—ã¾ã™ã€‚ãŸã¨ãˆã°ã€ +一般的㪠IoT ベースã®éƒ¨å±‹ã§ã¯ã€æ¸©åº¦ã‚»ãƒ³ã‚µãƒ¼ã€å…‰ã‚»ãƒ³ã‚µãƒ¼ãªã©ã®ã•ã¾ã–ã¾ãªã‚»ãƒ³ã‚µãƒ¼ãŒã€ã“れらã®ã‚»ãƒ³ã‚µãƒ¼å‡ºåŠ›ã‚’ä½¿ç”¨ã—㦠+コンシューマーã¨ã—ã¦æ©Ÿèƒ½ã™ã‚‹ä¸­å¤®ã‚¢ãƒ—ãƒªã‚±ãƒ¼ã‚·ãƒ§ãƒ³ã«æŽ¥ç¶šã•れã¦ã„ã¾ã™ã€‚ã“ã®ä¸­å¤®ã‚¢ãƒ—リケーションã€ã¤ã¾ã‚Š Scorpio ã«ã¯ +多ãã®ãƒ¦ãƒ¼ã‚¹ã‚±ãƒ¼ã‚¹ãŒã‚りã¾ã™ã€‚ + +1. Scorpio ã¯ã€NGSI-LD API ã¨æƒ…報モデルを使用ã—ã¦ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’ãã®ãƒ—ロパティã¨ãƒªãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚·ãƒƒãƒ—ã§ãƒ¢ãƒ‡ãƒ«åŒ–ã—〠+エンティティをノードã¨ã—ã¦æŒã¤ãƒ—ロパティグラフを形æˆã—ã¾ã™ã€‚エンティティを検出ã—ã€ãƒªãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚·ãƒƒãƒ—を追跡ã—〠+プロパティã€ãƒªãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚·ãƒƒãƒ—ã€ãŠã‚ˆã³é–¢é€£ã™ã‚‹ãƒ¡ã‚¿æƒ…å ±ã«å¾“ã£ã¦ãƒ•ィルタリングã™ã‚‹ã“ã¨ã§ã€æƒ…報を見ã¤ã‘ã‚‹ã“ã¨ãŒã§ãã¾ã™ã€‚ +ビデオストリームや3Dモデルã®ã‚ˆã†ã« NGSI-LD ã§ç›´æŽ¥è¡¨ã•れã¦ã„ãªã„データã®å ´åˆã€ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ãŒã“ã®æƒ…å ±ã«ç›´æŽ¥ã‚¢ã‚¯ã‚»ã‚¹ +ã§ãるよã†ã«ã™ã‚‹ãƒªãƒ³ã‚¯ã‚’モデルã«è¿½åŠ ã§ãã¾ã™ã€‚ã“ã®ã‚ˆã†ã«ã—ã¦ã€Scorpio ã¯ãƒ‡ãƒ¼ã‚¿ãƒ¬ã‚¤ã‚¯ã«ã‚°ãƒ©ãƒ•ベースã®ã‚¤ãƒ³ãƒ‡ãƒƒã‚¯ã‚¹ã‚’ +æä¾›ã§ãã¾ã™ã€‚ + +2. 
Scorpio ã¯ã€ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿ã‚’クエリã™ã‚‹ãŸã‚ã®ã„ãã¤ã‹ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイスをæä¾›ã™ã‚‹ãŸã‚ã€ä¿å­˜ã•れãŸãƒ‡ãƒ¼ã‚¿ã«å¯¾ã—㦠+ç°¡å˜ã«åˆ†æžã‚’行ã†ã“ã¨ãŒã§ãã¾ã™ã€‚エコシステムã®çжæ³ã‚’予測ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã§ãるよã†ã«ã€‚例: 巨大ãªå»ºç‰©ã«ã¯ã€ +複数ã®ç«ç½ã‚»ãƒ³ã‚µãƒ¼ã€æ¸©åº¦ã‚»ãƒ³ã‚µãƒ¼ã€ç…™ã‚»ãƒ³ã‚µãƒ¼ãŒã‚りã¾ã™ã€‚誤ã£ãŸç«ç½è­¦å ±å™¨ã®å ´åˆã¯ã€åŽé›†ã•れãŸç‰¹å®šã®åœ°åŸŸã®ç«ç½ãƒ‡ãƒ¼ã‚¿ã€ +温度データã€ãŠã‚ˆã³ç…™ãƒ‡ãƒ¼ã‚¿ã«ã‚ˆã£æ¤œè¨¼ã¦ã§ãã¾ã™ã€‚ + +3. Scorpio ã¯ã€ã‚らゆるイベントã®ç²¾åº¦ã‚’判断ã™ã‚‹ãŸã‚ã«ä½¿ç”¨ã§ãã¾ã™ã€‚ãŸã¨ãˆã°ã€è‡ªå‹•化ã•れãŸè»Šã§ã¯ã€è»Šã®é€Ÿåº¦ã¯ GPS〠+自動速度å–締機ã€é€Ÿåº¦è¨ˆãªã©ã®ã„ãã¤ã‹ã®ã‚¢ãƒ—リケーションã§çŸ¥ã‚‹ã“ã¨ãŒã§ãã¾ã™ã€‚Scorpio ã®å†…部データã¯ã“ã®æ–¹æ³•ã§ä¿å­˜ã•れる +ãŸã‚ã€ã‚µãƒ¼ãƒ‰ãƒ‘ーティã®ã‚¢ãƒ—リケーションã¯ãれを使用ã—ã¦ã€æ­£ç¢ºã•を見ã¤ã‘ã€éšœå®³ã®ã‚るデãƒã‚¤ã‚¹ã‚’特定ã§ãã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/useCaseDiagram.png + +.. toctree:: + :maxdepth: 1 + :caption: イントロダクション + :numbered: + + introduction.rst + +.. toctree:: + :maxdepth: 1 + :caption: ビギナー ガイド + :numbered: + + onepageTutorial.rst + buildScorpio.rst + mqtt.rst + +.. toctree:: + :maxdepth: 1 + :caption: FIWARE NGSI-LD API ウォークスルー + :numbered: + + API_walkthrough.rst + +.. toctree:: + :maxdepth: 1 + :caption: デベロッパー ガイド + :numbered: + + installationGuide.rst + hardwareRequirement.rst + errorHandling.rst + security.rst + HelloWorld.rst + multivalue.rst + +.. toctree:: + :maxdepth: 1 + :caption: アドãƒãƒ³ã‚¹ãƒ‰ ユーザ ガイド + :numbered: + + systemOverview.rst + callFlow.rst + contributionGuideline.rst + docker.rst + config.rst + troubleshooting.rst diff --git a/scorpio-broker/docs/ja/source/installationGuide.rst b/scorpio-broker/docs/ja/source/installationGuide.rst new file mode 100644 index 0000000000000000000000000000000000000000..2118e7f10d11d7fbb187b92896d390e83261b950 --- /dev/null +++ b/scorpio-broker/docs/ja/source/installationGuide.rst @@ -0,0 +1,313 @@ +**************************** +開発者å‘ã‘インストールガイド +**************************** + +Scorpio broker ã®ç’°å¢ƒã‚’セットアップã™ã‚‹ã«ã¯ã€æ¬¡ã®ä¾å­˜é–¢ä¿‚ã‚’æ§‹æˆã™ã‚‹å¿…è¦ãŒã‚りã¾ã™:- + +1. Eclipse. +2. Server JDK. +3. Apache Kafka. +4. 
PostgreSQL + + +Windows +####### + +Eclipse ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ« +********************** + +- **Eclipse ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ©ãƒ¼ã‚’ダウンロードã—ã¾ã™**: + +http://www.eclipse.org/downloads.Eclipse ã‹ã‚‰ Eclipse インストーラーをダウンロードã—ã¾ã™ã€‚Eclipse ã¯ä¸–界中ã®å¤šãã® +ミラーã§ãƒ›ã‚¹ãƒˆã•れã¦ã„ã¾ã™ã€‚最寄りã®ã‚‚ã®ã‚’é¸æŠžã—ã¦ã€ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ©ãƒ¼ã®ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ã‚’é–‹å§‹ã—ã¦ãã ã•ã„。 + +- **Eclipse ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ©ãƒ¼å®Ÿè¡Œå¯èƒ½ãƒ•ァイルを起動ã—ã¾ã™**: + +Windows ユーザーã®å ´åˆã€Eclipse インストーラーã®å¾Œã€å®Ÿè¡Œå¯èƒ½ãƒ•ァイルã®ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ãŒå®Œäº†ã™ã‚‹ã¨ã€ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ +ディレクトリã§åˆ©ç”¨ã§ãるよã†ã«ãªã‚Šã¾ã™ã€‚Eclipse インストーラー実行å¯èƒ½ãƒ•ァイルを起動ã—ã¾ã™ã€‚ã“ã®ãƒ•ァイルを実行ã™ã‚‹ã¨ã€ +セキュリティ警告ãŒè¡¨ç¤ºã•れる場åˆãŒã‚りã¾ã™ã€‚Eclipse Foundation ãŒãƒ‘ブリッシャーã§ã‚ã‚‹å ´åˆã¯ã€Run (実行) ã‚’é¸æŠžã—ã¾ã™ã€‚ + +Mac ãŠã‚ˆã³ Linux ユーザーã®å ´åˆã§ã‚‚ã€ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ©ãƒ¼ã‚’作æˆã™ã‚‹ã«ã¯ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ã‚’è§£å‡ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚インストーラー㌠+利用å¯èƒ½ã«ãªã£ãŸã‚‰èµ·å‹•ã—ã¾ã™ã€‚ + +- **インストールã™ã‚‹ãƒ‘ãƒƒã‚±ãƒ¼ã‚¸ã‚’é¸æŠžã—ã¾ã™**: + +æ–°ã—ã„ Eclipse インストーラーã¯ã€Eclipse ユーザーãŒåˆ©ç”¨ã§ãるパッケージを表示ã—ã¾ã™ã€‚インストールã™ã‚‹ãƒ‘ッケージを +検索ã™ã‚‹ã‹ã€ãƒªã‚¹ãƒˆã‚’スクロールã§ãã¾ã™ã€‚インストールã™ã‚‹ãƒ‘ãƒƒã‚±ãƒ¼ã‚¸ã‚’é¸æŠžã—ã¦ã‚¯ãƒªãƒƒã‚¯ã—ã¾ã™ã€‚ + +- **ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ãƒ•ã‚©ãƒ«ãƒ€ã‚’é¸æŠžã—ã¾ã™** + +Eclipse をインストールã™ã‚‹ãƒ•ォルダーを指定ã—ã¾ã™ã€‚デフォルトã®ãƒ•ォルダã¯ãƒ¦ãƒ¼ã‚¶ãƒ¼ãƒ‡ã‚£ãƒ¬ã‚¯ãƒˆãƒªã«ã‚りã¾ã™ã€‚‘Install’ +(インストール) ãƒœã‚¿ãƒ³ã‚’é¸æŠžã—ã¦ã€ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã‚’é–‹å§‹ã—ã¾ã™ã€‚ + +- **Eclipse ã®èµ·å‹•** + +インストールãŒå®Œäº†ã™ã‚‹ã¨ã€Eclipse ã‚’èµ·å‹•ã§ãるよã†ã«ãªã‚Šã¾ã™ã€‚Eclipse インストーラーãŒãã®ä½œæ¥­ã‚’完了ã—ã¾ã—ãŸã€‚ +ãƒãƒƒãƒ”ーコーディング。 + + +JDK セットアップ +**************** + +- JDK ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã‚’é–‹å§‹ã—ã€â€œChange destination folder†(インストール先フォルダーã®å¤‰æ›´) ãƒã‚§ãƒƒã‚¯ãƒœãƒƒã‚¯ã‚¹ã‚’ + 押ã—ã¦ã‹ã‚‰ã€'Install' をクリックã—ã¾ã™ã€‚ + +**注æ„:-** 推奨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ JDK-11 ã§ã™ã€‚Scorpio Broker ã¯ã€ã“ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã§ã®ã¿é–‹ç™ºãŠã‚ˆã³ãƒ†ã‚¹ãƒˆã•れã¦ã„ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/jdk-1.png + +- インストールディレクトリをã€ãƒ•ォルダåã«ã‚¹ãƒšãƒ¼ã‚¹ã‚’入れãªã„ä»»æ„ã®ãƒ‘スã«å¤‰æ›´ã—ã¾ã™ã€‚ + +Windows ã« Java をインストールã—ãŸå¾Œã€Java インストールディレクトリを指ã™ã‚ˆã†ã« JAVA_HOME 環境変数を設定ã™ã‚‹å¿…è¦ãŒ +ã‚りã¾ã™ã€‚ + +**JAVA_HOME 変数を設定** + +JAVA_HOME 変数を設定ã™ã‚‹ã«ã¯: + +1. Java ãŒã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã•れã¦ã„る場所を調ã¹ã¾ã™ã€‚インストール中ã«ãƒ‘スを変更ã—ãªã‹ã£ãŸå ´åˆã¯ã€æ¬¡ã®ã‚ˆã†ã«ãªã‚Šã¾ã™: + +*C:\Program Files\Java\jdk1.version_detail* + +2. + +- Windows 8/10ã§ã¯ã€**Control Panel** > **System** > **Advanced System Settings** ã«ç§»å‹•ã—ã¾ã™ã€‚ + + ã¾ãŸã¯ + +- Windows 7ã§ã¯ã€**My Computer** をå³ã‚¯ãƒªãƒƒã‚¯ã—ã¦ã€**Properties** > **Advanced** ã‚’é¸æŠžã—ã¾ã™ã€‚ + +3. Environment Variables ボタンをクリックã—ã¾ã™ã€‚ + +4. System Variables ã§ã€New をクリックã—ã¾ã™ã€‚ + +5. User Variable Name フィールドã«ã€æ¬¡ã®ã‚ˆã†ã«å…¥åŠ›ã—ã¾ã™: **JAVA_HOME** + +6. User Variable Value フィールドã«ã€JDK パスを入力ã—ã¾ã™ã€‚ + +(Java ã®ãƒ‘スã¨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ã€ä½¿ç”¨ã—ã¦ã„ã‚‹ Kafka ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã«ã‚ˆã£ã¦ç•°ãªã‚‹å ´åˆãŒã‚りã¾ã™) + +7. 次㫠OK をクリックã—ã¾ã™ã€‚ + +8. é–‹ã„ãŸã°ã‹ã‚Šã® “Environment Variables†ダイアログボックス㮠“System Variable†セクションã§ãƒ‘ス変数を検索ã—ã¾ã™ã€‚ + +9. パスを編集ã—ã€ä¸‹ã®ç”»åƒã®ã‚ˆã†ã«ã€ã™ã§ã«ãã“ã«æ›¸ãè¾¼ã¾ã‚Œã¦ã„ã‚‹ãƒ†ã‚­ã‚¹ãƒˆã®æœ€å¾Œã« *;%JAVA_HOME%\bin* ã¨å…¥åŠ›ã—ã¾ã™: + +.. figure:: ../../en/source/figures/jdk-3.png + + +- Java ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã‚’確èªã™ã‚‹ã«ã¯ã€cmd ã‚’é–‹ã„㦠“java –version†ã¨å…¥åŠ›ã—ã¾ã™ã€‚インストールã—㟠Java ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ãŒè¡¨ç¤ºã•れるã¯ãšã§ã™ã€‚ + +.. 
figure:: ../../en/source/figures/jdk-4.png + +コマンドプロンプトãŒä¸Šã®ç”»åƒã®ã‚ˆã†ã«ãªã£ã¦ã„ã‚‹å ´åˆã¯ã€å•題ã‚りã¾ã›ã‚“。ãれ以外ã®å ´åˆã¯ã€ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ—ãƒãƒ¼ã‚¸ãƒ§ãƒ³ãŒæ­£ã—ã„ +OS アーキテクãƒãƒ£ (x86, x64) ã¨ä¸€è‡´ã™ã‚‹ã‹ã©ã†ã‹ã€ã¾ãŸã¯ç’°å¢ƒå¤‰æ•°ã®ãƒ‘ã‚¹ãŒæ­£ã—ã„ã‹ã©ã†ã‹ã‚’å†ç¢ºèªã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + + +Kafka ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +******************** + +1. Kafka æ§‹æˆãƒ‡ã‚£ãƒ¬ã‚¯ãƒˆãƒªã«ç§»å‹•ã—ã¾ã™ã€‚ +例:- **C:\kafka_2.11-0.9.0.0\config** + +2. ファイル “server.properties†+を編集ã—ã¾ã™ã€‚ + +3. "log.dirs=/tmp/kafka-logs†ãŒã‚る行を見ã¤ã‘㦠+"log.dir= C:\kafka_2.11-0.9.0.0\kafka-logs" ã«ç·¨é›†ã—ã¾ã™ã€‚ + +4. ZooKeeper ãŒä»–ã®ãƒžã‚·ãƒ³ã¾ãŸã¯ã‚¯ãƒ©ã‚¹ã‚¿ãƒ¼ã§å®Ÿè¡Œã•れã¦ã„ã‚‹å ´åˆã¯ã€â€œzookeeper.connect:2181â€Â ã‚’カスタムIPã¨ãƒãƒ¼ãƒˆã« +編集ã§ãã¾ã™ã€‚ã“ã®ãƒ‡ãƒ¢ã§ã¯ã€åŒã˜ãƒžã‚·ãƒ³ã‚’使用ã—ã¦ã„ã‚‹ãŸã‚ã€å¤‰æ›´ã™ã‚‹å¿…è¦ã¯ã‚りã¾ã›ã‚“。ã¾ãŸã€Kafka ãƒãƒ¼ãƒˆã¨ broker.id +ã¯ã“ã®ãƒ•ã‚¡ã‚¤ãƒ«ã§æ§‹æˆã§ãã¾ã™ã€‚ä»–ã®è¨­å®šã¯ãã®ã¾ã¾ã«ã—ã¦ãŠãã¾ã™ã€‚ + +5. Kafka ã¯ãƒ‡ãƒ•ォルトãƒãƒ¼ãƒˆ 9092 ã§å®Ÿè¡Œã•れ〠+ZooKeeper ã®ãƒ‡ãƒ•ォルトãƒãƒ¼ãƒˆ 2181 ã«æŽ¥ç¶šã—ã¾ã™ã€‚ + +**注æ„**: Kafka を実行ã™ã‚‹ã«ã¯ã€Zookeeper を最åˆã«å®Ÿè¡Œã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚Kafka ã‚’é–‰ã˜ã‚‹ã¨ãã¯ã€Kafka よりも Zookeeper +を最åˆã«é–‰ã˜ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚Kafka ã®æŽ¨å¥¨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ kafka_2.12-2.1.0 ã§ã™ã€‚ + + +Kafka サーãƒãƒ¼ã®å®Ÿè¡Œ +******************** + +é‡è¦: Kafka サーãƒãƒ¼ã‚’èµ·å‹•ã™ã‚‹å‰ã«ã€ZooKeeper インスタンスãŒç¨¼åƒã—ã¦ã„ã‚‹ã“ã¨ã‚’確èªã—ã¦ãã ã•ã„。 + +1. Kafka インストールディレクトリã«ç§»å‹•ã—ã¾ã™: **C:\kafka_2.11-0.9.0.0\** + +2. Shift キーを押ã—ãªãŒã‚‰å³ã‚¯ãƒªãƒƒã‚¯ã—ã¦ã‚³ãƒžãƒ³ãƒ‰ãƒ—ロンプトを開ãã€â€œOpen command window hereâ€ ã‚ªãƒ—ã‚·ãƒ§ãƒ³ã‚’é¸æŠžã—ã¾ã™ã€‚ + +3. 次ã«ã€**.\bin\windows\kafka-server-start.bat .\config\server.properties** ã¨å…¥åŠ›ã—ã€Enter キーを押ã—ã¾ã™ã€‚ + +4. ãã—ã¦ã€**.\bin\windows\kafka-server-start.bat .\config\server.properties** ã‚’æ–°ã—ã„コマンドウィンドウã«ã—ã¦ã€Etner キーを押ã—ã¾ã™ã€‚ + +PostgreSQL ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +************************* + +ステップ 1) https://www.postgresql.org/download ã«ã‚¢ã‚¯ã‚»ã‚¹ã—ã¾ã™ã€‚ + + +ステップ 2) 2ã¤ã®ã‚ªãƒ—ションãŒã‚りã¾ã™:- + +1. EnterpriseDB ã«ã‚ˆã‚‹ã‚¤ãƒ³ã‚¿ãƒ©ã‚¯ãƒ†ã‚£ãƒ–インストーラー + +2. BigSQL ã«ã‚ˆã‚‹ã‚°ãƒ©ãƒ•ィカルインストーラー + +BigSQL ã¯ç¾åœ¨ã€éžæŽ¨å¥¨ã® pgAdmin ãƒãƒ¼ã‚¸ãƒ§ãƒ³3をインストールã—ã¦ã„ã¾ã™ã€‚最新ãƒãƒ¼ã‚¸ãƒ§ãƒ³4をインストールã™ã‚‹ EnterpriseDB +ã‚’é¸æŠžã™ã‚‹ã“ã¨ã‚’ãŠå‹§ã‚ã—ã¾ã™ã€‚ + + +ステップ 3) + +1. 目的㮠Postgres ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¨ã‚ªãƒšãƒ¬ãƒ¼ãƒ†ã‚£ãƒ³ã‚°ã‚·ã‚¹ãƒ†ãƒ ãŒè¡¨ç¤ºã•れã¾ã™ã€‚Scorpio ã¯ã“ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã§ãƒ†ã‚¹ãƒˆãŠã‚ˆã³é–‹ç™ºã•れã¦ã„ã‚‹ãŸã‚ã€Postgres10 ã‚’é¸æŠžã—ã¾ã™ã€‚ + +2. ダウンロードボタンをクリックã™ã‚‹ã¨ã€ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ãŒå§‹ã¾ã‚Šã¾ã™ã€‚ + +ステップ 4) ダウンロードã—㟠.exe ファイルを開ãã€ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã®ã‚ˆã†ã“ãç”»é¢ã§ "Next" をクリックã—ã¾ã™ã€‚ + + +ステップ 5) + +1. å¿…è¦ã«å¿œã˜ã¦ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ãƒ‡ã‚£ãƒ¬ã‚¯ãƒˆãƒªã‚’変更ã—ã¾ã™ã€‚ãれ以外ã®å ´åˆã¯ãƒ‡ãƒ•ォルトã®ã¾ã¾ã«ã—ã¾ã™ã€‚ + +2. "Next" をクリックã—ã¾ã™ã€‚ + + +ステップ 6) + +1. システムã«ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã™ã‚‹ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’é¸æŠžã§ãã¾ã™ã€‚Stack Builder ã®ãƒã‚§ãƒƒã‚¯ã‚’外ã—ã¦ã‚‚ã‹ã¾ã„ã¾ã›ã‚“。 + +2. "Next" をクリックã—ã¾ã™ã€‚ + + +ステップ 7) + +1. データã®å ´æ‰€ã‚’変更ã§ãã¾ã™ + +2. "Next" をクリックã—ã¾ã™ã€‚ + + +ステップ 8) + +1. スーパーユーザーã®ãƒ‘スワードを入力ã—ã¾ã™ã€‚ãれをメモã—ã¦ãã ã•ã„。 + +2. "Next" をクリックã—ã¾ã™ã€‚ + + +ステップ 9) + +1. ãƒãƒ¼ãƒˆç•ªå·ã‚’デフォルトã®ã¾ã¾ã«ã—ã¾ã™ã€‚ + +2. "Next" をクリックã—ã¾ã™ã€‚ + + +ステップ 10) + +1. インストールå‰ã®æ¦‚è¦ã‚’確èªã—ã¾ã™ã€‚ + +2. "Next" をクリックã—ã¾ã™ã€‚ + +ステップ 11) "Next" をクリックã—ã¾ã™ã€‚ + +ステップ 12) インストールãŒå®Œäº†ã™ã‚‹ã¨ã€Stack Builder プロンプトãŒè¡¨ç¤ºã•れã¾ã™ã€‚ + +1. ãã®ã‚ªãƒ—ションã®ãƒã‚§ãƒƒã‚¯ã‚’外ã—ã¾ã™ã€‚より高度ãªãƒãƒ¥ãƒ¼ãƒˆãƒªã‚¢ãƒ«ã§ Stack Builder を使用ã—ã¾ã™ã€‚ + +2. 
"Finish" をクリック + +ステップ 13) Postgres ã‚’èµ·å‹•ã™ã‚‹ã«ã¯ã€ã‚¹ã‚¿ãƒ¼ãƒˆãƒ¡ãƒ‹ãƒ¥ãƒ¼ã«ç§»å‹•ã—ã€pgAdmin 4 を検索ã—ã¾ã™ã€‚ + +ステップ 14) pgAdmin ホームページãŒè¡¨ç¤ºã•れã¾ã™ã€‚ + +ステップ 15) 左å´ã®ãƒ„リーã§ã€Servers > Postgre SQL 10 をクリックã—ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/dbconfig-1.png + +ステップ 16) + +1. インストール時ã«è¨­å®šã—ãŸã‚¹ãƒ¼ãƒ‘ーユーザーパスワードを入力ã—ã¾ã™ã€‚ + +2. "OK" をクリックã—ã¾ã™ã€‚ + +ステップ 17) ダッシュボードãŒè¡¨ç¤ºã•れã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/dbconfig-2.png + +PostgreSQL ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã¯ä»¥ä¸Šã§ã™ã€‚ + +Linux +##### + +JDK セットアップ +**************** + +マシン㫠Java 環境を作æˆã™ã‚‹ã«ã¯ã€JDK をインストールã—ã¾ã™ã€‚ã“れを行ã†ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã‚’é–‹ãã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’実行ã—ã¾ã™:- + +1. sudo apt-get update + +2. sudo apt-get install openjdk-8-jdk + +JDK ãŒãƒžã‚·ãƒ³ã«æ­£ã—ãインストールã•れã¦ã„ã‚‹ã“ã¨ã‚’確èªã™ã‚‹ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã§ã‚³ãƒžãƒ³ãƒ‰ **java -version** を実行ã—ã€JDK +ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ãŒ 11 ã¨ã—ã¦è¿”ã•れる場åˆã¯ã€æ­£å¸¸ã«æ©Ÿèƒ½ã—ã¦ã„ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/javaTerminal.png + +Eclipse ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ« +********************** + +最åˆã« Linux マシン㫠Eclipse をインストールã™ã‚‹ã«ã¯ã€ãƒªãƒ³ã‚¯ https://www.eclipse.org/downloads/ ã«ã‚¢ã‚¯ã‚»ã‚¹ã—ã€Linux +マシンã®ãƒ•レーãƒãƒ¼ã«åŸºã¥ã„㦠Eclipse ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã‚’é¸æŠžã—ã¾ã™ã€‚ + +Kafka ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +******************** + +マシン㫠Apache Kafka をダウンロードã™ã‚‹ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã§æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’1ã¤ãšã¤å®Ÿè¡Œã—ã¾ã™ã€‚ + +1. mkdir kafka + +2. cd kafka + +3. wget https://archive.apache.org/dist/kafka/2.2.0/kafka_2.12-2.2.0.tgz + +4. tar -xzf kafka_2.12-2.2.0.tgz + +Kafka ãŒãƒžã‚·ãƒ³ã«ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ã•れãŸã‚‰ã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’押ã—ã¦å®Ÿè¡Œã—ã¾ã™ã€‚ + +1. kafka_2.12-2.2.0/bin/zookeeper-server-start.sh kafka_2.12-2.2.0/config/zookeeper.properties > /dev/null 2>&1 & + +2. kafka_2.12-2.2.0/bin/kafka-server-start.sh kafka_2.12-2.2.0/config/server.properties > /dev/null 2>&1 & + +PostgreSQL ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +************************* + +マシン㫠PostgreSQL をダウンロードã™ã‚‹ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã‹ã‚‰æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’実行ã—ã¾ã™ã€‚ + +1. sudo apt update + +2. sudo apt-get install postgresql-10 + +3. service postgresql status + +最後ã®ã‚³ãƒžãƒ³ãƒ‰ã¯ã€ãƒžã‚·ãƒ³ã® PostgreSQL ã®ã‚¹ãƒ†ãƒ¼ã‚¿ã‚¹ã‚’示ã—ã¾ã™ã€‚ã“れãŒå›³ã®1ã¤ã¨ä¸€è‡´ã™ã‚‹å ´åˆã¯ã€ã™ã¹ã¦ãŒæ­£ã—ãインストール +ã•れã¦ã„ã¾ã™ã€‚ãれ以外ã®å ´åˆã¯ã€ã‚³ãƒžãƒ³ãƒ‰ã‚’å†å®Ÿè¡Œã—ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/postgresTerminal.png + +PostgreSQL ãŒãƒžã‚·ãƒ³ã«æ­£å¸¸ã«ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã•れãŸã‚‰ã€ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ ngb を作æˆã—ã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’実行ã—ã¦ãã®å½¹å‰²ã‚’変更ã—ã¾ã™: + +1. psql -U postgres -c "create database ngb;" + +2. psql -U postgres -c "create user ngb with password 'ngb';" + +3. psql -U postgres -c "alter database ngb owner to ngb;" + +4. psql -U postgres -c "grant all privileges on database ngb to ngb;" + +5. psql -U postgres -c "alter role ngb superuser;" + +6. sudo apt install postgresql-10-postgis-2.4 + +7. sudo apt install postgresql-10-postgis-scripts + +8. 
sudo -u postgres psql -U postgres -c "create extension postgis; + +ã“ã®å¾Œã€PostgreSql ã‚’ ScorpioBoker ã§ä½¿ç”¨ã™ã‚‹æº–å‚™ãŒæ•´ã„ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/introduction.rst b/scorpio-broker/docs/ja/source/introduction.rst new file mode 100644 index 0000000000000000000000000000000000000000..1c0756bd21316f1058e91af45307ecf601994472 --- /dev/null +++ b/scorpio-broker/docs/ja/source/introduction.rst @@ -0,0 +1,35 @@ +****************** +イントロダクション +****************** + +Scorpio Broker ã¯ã€**ETSI standards** ã«æº–æ‹ ã—㟠**NGSI-LD standard** 仕様ã®ãƒªãƒ•ァレンス実装ã§ã™ã€‚基本的ã«ã€Scorpio +Broker 㯠**FIWARE/IoT** プラットフォームã®ã‚³ã‚¢ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã§ã‚りã€å‹•的コンテキストã«ã‚ˆã£ã¦é§†å‹•ã•れる IoT データ㌠+åŽé›†ã€å‡¦ç†ã€é€šçŸ¥ã•れã€ã•ã¾ã–ã¾ãªã‚¢ãƒ—リケーションã®ä½¿ç”¨çжæ³ã®è¦³ç‚¹ã‹ã‚‰ä¿å­˜/å–り込ã¾ã‚Œã¾ã™ã€‚Scorpio Broker ã¯ã€NGSI-LD API +ä»•æ§˜ã«æº–æ‹ ã™ã‚‹ã•ã¾ã–ã¾ãªãƒ‡ãƒ¼ã‚¿ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆæ“作用㮠REST API エンドãƒã‚¤ãƒ³ãƒˆã®å®Ÿè£…ã‚‚æä¾›ã—ã¾ã™ã€‚Scorpio Broker +を使用ã™ã‚‹ã¨ã€ãƒªãƒ³ã‚¯ãƒˆãƒ‡ãƒ¼ã‚¿ã®æ¦‚念を使用ã—ã¦ã€å‹•çš„ãªã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã§ IoT データをåŽé›†ã€å‡¦ç†ã€é€šçŸ¥ã€ãŠã‚ˆã³ä¿å­˜ã§ãã¾ã™ã€‚ +**spring boot** ã®åŠ©ã‘ã‚’å€Ÿã‚Šã¦æ§‹ç¯‰ã•れ㟠**microservice-based architecture** (マイクロサービスベースã®ã‚¢ãƒ¼ã‚­ãƒ†ã‚¯ãƒãƒ£) ã‚’ +利用ã—ã¾ã™ã€ã‚¹ã‚±ãƒ¼ãƒ©ãƒ“リティã€ã‚¯ãƒ­ã‚¹ãƒ†ã‚¯ãƒŽãƒ­ã‚¸ãƒ¼çµ±åˆãªã©ã€æ—¢å­˜ã® IoT Broker ã«æ¯”ã¹ã¦ç‹¬è‡ªã®åˆ©ç‚¹ãŒã‚りã¾ã™ã€‚ + +NGSI-LD ã«åŸºã¥ã Scorpio Broker ã¯ã€ãƒªãƒ³ã‚¯ãƒ‡ãƒ¼ã‚¿ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã®ç‹¬è‡ªã®æ©Ÿèƒ½ã‚’æä¾›ã—ã€å„メッセージ/エンティティã«å«ã¾ã‚Œã‚‹ +データã®è‡ªå·±å®Œçµåž‹ (self-contained) ã¾ãŸã¯å‚ç…§ (referenced)〠**動的スキーマ定義** (**dynamic schema definition**) 〠+ã¤ã¾ã‚Šã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚’æä¾›ã—ã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€Scorpio Broker ã®ã‚³ã‚¢å‡¦ç†ã¯ã€ã•ã¾ã–ã¾ãªã‚¹ã‚­ãƒ¼ãƒžã¨çµåˆã•れãŸã€ã¾ãŸã¯ +設計ã•れãŸã€ã•ã¾ã–ã¾ãªã‚¿ã‚¤ãƒ—ã®ãƒ‡ãƒ¼ã‚¿ã‚½ãƒ¼ã‚¹ã‹ã‚‰ã®å…¥åŠ›ã¨ã—ã¦å‹•çš„ãªã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆé§†å‹•型データをå–å¾—ã—ã¦ã‚‚ã€çµ±åˆã•れãŸã¾ã¾ã« +ãªã‚Šã¾ã™ã€‚ + +ä»–ã®ãƒ–ローカーã«å¯¾ã™ã‚‹ ScorpioBroker ã®ä¸»ãªåˆ©ç‚¹: + +- パフォーマンスを大幅ã«å‘上ã•ã›ã‚‹ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スアーキテクãƒãƒ£ã‚’使用ã—ã¾ã™ã€‚ + +- Scorpio Broker アーキテクãƒãƒ£ã¯ã€ã‚¹ã‚±ãƒ¼ãƒ©ãƒ–ルã§å¯ç”¨æ€§ãŒé«˜ãã€è² è·åˆ†æ•£ã•れãŸã‚‚ã®ã¨ã—ã¦è¨­è¨ˆãŠã‚ˆã³å®Ÿè£…ã•れã¦ã„ã¾ã™ã€‚ + +- 動的コンテキストを活用ã§ãã‚‹ Ld を使用ã—ã¾ã™ã€‚ + +- Kafka を使用ã—ã¦ã€ãƒ€ã‚¦ãƒ³ã‚¿ã‚¤ãƒ ãªã—ã§ã‚¹ã‚±ãƒ¼ãƒªãƒ³ã‚°ã™ã‚‹æ©Ÿèƒ½ã‚’å‚™ãˆãŸå …牢㪠pub-sub サービスをå¯èƒ½ã«ã—ã¾ã™ã€‚ + +- フェイルオーãƒãƒ¼ã®å¾©å…ƒåŠ›ã‚’æä¾›ã—ã¾ã™ã€‚ + +- 分散インフラストラクãƒãƒ£ã«è² è·ã‚’分散ã™ã‚‹ãŸã‚ã®è² è·åˆ†æ•£ã‚’æä¾›ã—ã¾ã™ã€‚ + +- 設計上ã€ä½Žçµåˆã¨é«˜å‡é›†åº¦ã‚’æä¾›ã™ã‚‹ã®ã«å分ãªãƒ¢ã‚¸ãƒ¥ãƒ¼ãƒ«å¼ã§ã™ã€‚ + +- アプリケーションロジックを何度も変更ã™ã‚‹ã“ã¨ãªãã€ã•ã¾ã–ã¾ãªã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸çµ±åˆã‚’æä¾›ã—ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/mqtt.rst b/scorpio-broker/docs/ja/source/mqtt.rst new file mode 100644 index 0000000000000000000000000000000000000000..19ebfb9dd64b46c1d7b1eb2415626fbdd4e45f55 --- /dev/null +++ b/scorpio-broker/docs/ja/source/mqtt.rst @@ -0,0 +1,156 @@ +*************************** +MQTT ノーティフィケーション +*************************** + +MQTT 㯠pub/sub ベースã®ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ãƒã‚¹ã§ã‚りã€ãƒˆãƒ”ックを処ç†ã—ã¾ã™ã€‚詳細ã«ã¤ã„ã¦ã¯ã€https://mqtt.org/ ã‚’ã”覧ãã ã•ã„。 +NGSI-LD を使用ã™ã‚‹ã¨ã€MQTT を介ã—ã¦ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ä¿¡ã§ãã¾ã™ã€‚HTTP 経由ã§å—ä¿¡ã—ãŸã‚µãƒ–スクリプションã¯ã€ +サブスクリプション㮠"notification.endpoint.uri" メンãƒãƒ¼ã§ MQTT エンドãƒã‚¤ãƒ³ãƒˆã‚’指定ã—ã€MQTT ノーティフィケーション +ãƒã‚¤ãƒ³ãƒ‡ã‚£ãƒ³ã‚°ã¯ NGSI-LD 実装ã§ã‚µãƒãƒ¼ãƒˆã•れã¾ã™ã€‚ã“ã®ã‚µãƒ–スクリプションã«é–¢é€£ã™ã‚‹ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã¯ã€MQTT +プロトコル経由ã§é€ä¿¡ã•れã¾ã™ã€‚ + +MQTT エンドãƒã‚¤ãƒ³ãƒˆ URI ã®æ§‹æ–‡ã¯ **mqtt[s]://[][:]@[:]/[/]** ã§ã‚り〠+MQTT エンドãƒã‚¤ãƒ³ãƒˆã‚’ URI ã¨ã—ã¦è¡¨ã™ãŸã‚ã®æ—¢å­˜ã®è¦å‰‡ã«å¾“ã„ã¾ã™ã€‚ + +ユーザーåã¨ãƒ‘スワードã¯ã€ã‚¨ãƒ³ãƒ‰ãƒã‚¤ãƒ³ãƒˆ URI 
ã®ä¸€éƒ¨ã¨ã—ã¦ã‚ªãƒ—ã‚·ãƒ§ãƒ³ã§æŒ‡å®šã§ãã¾ã™ã€‚ãƒãƒ¼ãƒˆãŒæ˜Žç¤ºçš„ã«æŒ‡å®šã•れã¦ã„ãªã„ +å ´åˆã€ãƒ‡ãƒ•ォルト㮠MQTT ãƒãƒ¼ãƒˆã¯ MQTT over TCP ã®å ´åˆã¯ **1883** ã€MQTTS ã®å ´åˆã¯ **8883** ã§ã™ã€‚MQTT プロトコルã®å ´åˆã€ +ç¾åœ¨ã‚µãƒãƒ¼ãƒˆã•れã¦ã„ã‚‹ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ **MQTTv3.1.1** 㨠**MQTTv5.0** ã®2ã¤ã§ã™ã€‚ + +.. figure:: ../../en/source/figures/MQTT.jpg + +MQTT を介ã—ãŸScorpio Broker ã®ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションã®ãƒ•ロー:- + +1. TOPIC をサブスクライブã—ã¾ã™ã€‚ + +2. ノーティフィケーションをé€ä¿¡ã™ã‚‹ãŸã‚ã®é€£çµ¡å…ˆã¨ã—㦠MQTT サーãƒãƒ¼ã® URI を使用ã—ã¦ã€NGSI-LD サブスクリプションを作æˆã—ã¾ã™ã€‚ + +3. URI ã‹ã‚‰æŠ½å‡ºã•れãŸãƒˆãƒ”ックã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションを公開ã—ã¾ã™ã€‚ + +4. MQTT サーãƒãƒ¼ã‹ã‚‰ MQTT サブスクライãƒãƒ¼ã«ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをé€ä¿¡ã—ã¾ã™ã€‚ + +MQTT ブローカーを開始ã™ã‚‹ã«ã¯ã€ä»¥ä¸‹ã®æ‰‹é †ã«å¾“ã„ã¾ã™:- + +1. MQTT ブローカー (Mosquitto) をインストールã—ã¾ã™ã€‚ + +2. Chrome 拡張機能 MQTTlens を追加ã—ã¾ã™ã€‚ + +3. MQTT ブローカー接続を作æˆã—ã¾ã™ã€‚ + +4. トピックをサブスクライブã—ã¾ã™ã€‚ + +オペレーション +############## + +1. エンティティã®ä½œæˆ +********************* + +エンティティを作æˆã™ã‚‹ã«ã¯ã€æŒ‡å®šã•れãŸãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã§ã‚¨ãƒ³ãƒ‰ãƒã‚¤ãƒ³ãƒˆ **http://:/ngsi-ld/v1/entities/** ã‚’ +ヒットã—ã¾ã™ã€‚ + +.. code-block:: JSON + + { + "id":"urn:ngsi-ld:Vehicle:A135", + "type":"Vehicle", + "brandName":{ + "type":"Property", + "value":"Mercedes" + }, + "speed":[{ + "type":"Property", + "value": 55, + "datasetId": "urn:ngsi-ld:Property:speedometerA4567-speed", + "source":{ + "type":"Property", + "value": "Speedometer" + } + }, + { + "type":"Property", + "value": 11, + "datasetId": "urn:ngsi-ld:Property:gpsA4567-speed", + "source":{ + "type":"Property", + "value": "GPS" + } + }, + { + "type":"Property", + "value": 10, + "source":{ + "type":"Property", + "value": "CAMERA" + } + }] + } + +2. サブスクリプション +********************* + +エンティティã«ã‚µãƒ–スクライブã™ã‚‹ã«ã¯ã€æŒ‡å®šã•れãŸãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã§ã‚¨ãƒ³ãƒ‰ãƒã‚¤ãƒ³ãƒˆ +**http://:/ ngsi-ld/v1/subscriptions/** ã«ã‚¢ã‚¯ã‚»ã‚¹ã—ã¾ã™ã€‚ + +.. code-block:: JSON + + { + "id": "urn:ngsi-ld:Subscription:16", + "type": "Subscription", + "entities": [{ + "id": "urn:ngsi-ld:Vehicle:A135", + "type": "Vehicle" + }], + "watchedAttributes": ["brandName"], + "q":"brandName!=Mercedes", + "notification": { + "attributes": ["brandName"], + "format": "keyValues", + "endpoint": { + "uri": "mqtt://localhost:1883/notify", + "accept": "application/json", + "notifierinfo": { + "version" : "mqtt5.0", + "qos" : 0 + } + } + } + } + +3. ノーティフィケーション +************************* + +属性ã®å€¤ã‚’æ›´æ–°ã—ã€**http://:/ngsi-ld/v1/entities/entityId/attrs** ã§ PATCH リクエストを行ã†å ´åˆã€ + +.. code-block:: JSON + + { + "brandName":{ + "type":"Property", + "value":"BMW" + } + } + +次ã«ã€ãƒŽãƒ¼ãƒ†ã‚£ãƒ•ィケーションをå—ã‘å–りã¾ã™ã€‚ + +.. 
code-block:: JSON + + { + "metadata": { + "link": "https://json-ld.org/contexts/person.jsonld", + "contentType": "application/json" + }, + "body": { + "id": "ngsildbroker:notification:-7550927064189664633", + "type": "Notification", + "data": [{ + "id": "urn:ngsi-ld:Vehicle:A135", + "type": "Vehicle", + "brandName": { + "type": "Property", + "createdAt": "2020-07-29T07:19:33.872000Z", + "value": "BMW", + "modifiedAt": "2020-07-29T07:51:21.183000Z" + } + }], + "notifiedAt": "2020-07-29T07:51:22.300000Z", + "subscriptionId": "urn:ngsi-ld:Subscription:16" + } + } diff --git a/scorpio-broker/docs/ja/source/multivalue.rst b/scorpio-broker/docs/ja/source/multivalue.rst new file mode 100644 index 0000000000000000000000000000000000000000..c2bc1eb2750b356eb66421202168283d9301c043 --- /dev/null +++ b/scorpio-broker/docs/ja/source/multivalue.rst @@ -0,0 +1,131 @@ +********************************** +複数値属性 (Multi-value Attribute) +********************************** + +複数値属性 (Multi-value Attribute) ã¯ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãŒè¤‡æ•°ã®ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’æŒã¤å±žæ€§ã‚’åŒæ™‚ã«æŒã¤ã“ã¨ãŒã§ãる機能ã§ã™ã€‚ +プロパティã®å ´åˆã€ãŸã¨ãˆã°ã€ç•°ãªã‚‹å“質特性をæŒã¤ç‹¬ç«‹ã—ãŸã‚»ãƒ³ã‚µãƒ¼æ¸¬å®šã«åŸºã¥ã„ã¦ã€ãƒ—ロパティ値をæä¾›ã™ã‚‹ã‚½ãƒ¼ã‚¹ãŒä¸€åº¦ã« +複数存在ã™ã‚‹å ´åˆãŒã‚りã¾ã™ã€‚ + +例: 車ã®ç¾åœ¨ã®é€Ÿåº¦ã‚’æä¾›ã™ã‚‹ã‚¹ãƒ”ードメーター㨠GPS を使用ã™ã‚‹ã‹ã€æ¸©åº¦è¨ˆã¾ãŸã¯èµ¤å¤–線カメラを使用ã—ã¦ä¸¡æ–¹ã¨ã‚‚体温を +æä¾›ã—ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/multivalueDiagram.png + +リレーションシップã®å ´åˆã€æ©Ÿèƒ½ã—ãªã„リレーションシップ (Non-functional Relationships) ãŒå­˜åœ¨ã™ã‚‹å¯èƒ½æ€§ãŒã‚りã¾ã™ã€‚ +ãŸã¨ãˆã°ã€éƒ¨å±‹ (Room) ã®å ´åˆã€ã•ã¾ã–ã¾ãªäººã€…ã«ã‚ˆã£ã¦ãã“ã«ç½®ã‹ã‚Œã€æ™‚é–“ã¨ã¨ã‚‚ã«å‹•çš„ã«å¤‰åŒ–ã™ã‚‹ã€ç¾åœ¨éƒ¨å±‹ã«ã‚ã‚‹ã‚らゆる +種類ã®ã‚ªãƒ–ジェクトã«å¯¾ã—ã¦ã€è¤‡æ•°ã® "å«ã‚€" リレーションシップ (multiple "contains" Relationships) ãŒå­˜åœ¨ã™ã‚‹å ´åˆãŒ +ã‚りã¾ã™ã€‚ã“ã®ã‚ˆã†ãªè¤‡æ•°å±žæ€§ã‚’明示的ã«ç®¡ç†ã§ãるよã†ã«ã™ã‚‹ãŸã‚ã«ã€ã‚ªãƒ—ション㮠datasetId プロパティãŒä½¿ç”¨ã•れã¾ã™ã€‚ +ã“れã¯ãƒ‡ãƒ¼ã‚¿åž‹ URI ã§ã™ã€‚ + +CRUD æ“作 +--------- + +属性ã®ä½œæˆã€æ›´æ–°ã€è¿½åŠ ã€ã¾ãŸã¯å‰Šé™¤æ™‚ã« datasetId ãŒæŒ‡å®šã•れãŸå ´åˆã€åŒã˜ datasetId ã‚’æŒã¤ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã®ã¿ãŒå½±éŸ¿ã‚’å—ã‘〠+別㮠datasetId ã‚’æŒã¤ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã¾ãŸã¯ datasetId ã®ãªã„インスタンスã¯å¤‰æ›´ã•れã¾ã›ã‚“。datasetId ãŒæŒ‡å®šã•れã¦ã„ãªã„ +å ´åˆã¯ã€ãƒ‡ãƒ•ォルトã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã¨è¦‹ãªã•れã¾ã™ã€‚ã“ã®ãƒ‡ãƒ•ォルト㮠datasetId ã‚’æ˜Žç¤ºçš„ã«æŒ‡å®šã™ã‚‹å¿…è¦ã¯ã‚りã¾ã›ã‚“ãŒã€ +存在ã—ãªã„å ´åˆã§ã‚‚ã€ã“ã®ãƒ‡ãƒ•ォルト㮠datasetId ãŒãƒªã‚¯ã‚¨ã‚¹ãƒˆã«å­˜åœ¨ã™ã‚‹ã‹ã®ã‚ˆã†ã«æ‰±ã‚れã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€datasetId +を指定ã›ãšã«å±žæ€§ã‚’作æˆã€æ›´æ–°ã€è¿½åŠ ã€ã¾ãŸã¯å‰Šé™¤ã™ã‚‹ã¨ã€ãƒ‡ãƒ•ォルトã®ãƒ—ロパティインスタンスã«ã®ã¿å½±éŸ¿ã—ã¾ã™ã€‚ + +注:- リクエストã¾ãŸã¯ãƒ¬ã‚¹ãƒãƒ³ã‚¹ã§ã¯ã€ç‰¹å®šã®å±žæ€§åã‚’æŒã¤å±žæ€§ã®ãƒ‡ãƒ•ォルトã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã¯1ã¤ã ã‘ã§ã™ã€‚ + +エンティティ情報をリクエストã™ã‚‹ã¨ãã«ã€ä¸€è‡´ã™ã‚‹å±žæ€§ã®ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ãŒè¤‡æ•°ã‚ã‚‹å ´åˆã€ã“れらã¯å˜ä¸€ã® Attribute è¦ç´  +ã§ã¯ãªãã€ãれãžã‚Œå±žæ€§ã®é…列ã¨ã—ã¦è¿”ã•れã¾ã™ã€‚デフォルトã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã® datasetId ãŒãƒ¬ã‚¹ãƒãƒ³ã‚¹ã«æ˜Žç¤ºçš„ã«å«ã¾ã‚Œã‚‹ +ã“ã¨ã¯ã‚りã¾ã›ã‚“。datadataId ãŒé‡è¤‡ã—ã¦ã„ã‚‹ãŒã€ä»–ã®å±žæ€§ãƒ‡ãƒ¼ã‚¿ã«é•ã„ãŒã‚ã‚‹å±žæ€§ã®æƒ…å ±ãŒç«¶åˆã™ã‚‹å ´åˆã¯ã€æœ€æ–°ã® observedAt +DateTime ãŒã‚ã‚‹å ´åˆã¯ãれを使用ã—ã€ãれ以外ã®å ´åˆã¯æœ€æ–°ã® modifiedAt DateTime を使用ã™ã‚‹ã‚‚ã®ãŒæä¾›ã•れã¾ã™ã€‚ + +1. ä½œæˆæ“作 (Create Operation) +============================== + +複数値属性をæŒã¤ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã‚’作æˆã™ã‚‹ãŸã‚ã« ã€æŒ‡å®šã•れãŸãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã§ã‚¨ãƒ³ãƒ‰ãƒã‚¤ãƒ³ãƒˆ +**http://:/ngsi-ld/v1/entities/** ã«ã‚¢ã‚¯ã‚»ã‚¹ã§ãã¾ã™ã€‚ + +.. 
code-block:: JSON + + { + "id":"urn:ngsi-ld:Vehicle:A135", + "type":"Vehicle", + "brandName":{ + "type":"Property", + "value":"Mercedes" + }, + "speed":[{ + "type":"Property", + "value": 55, + "datasetId": "urn:ngsi-ld:Property:speedometerA4567-speed", + "source":{ + "type":"Property", + "value": "Speedometer" + } + }, + { + "type":"Property", + "value": 11, + "datasetId": "urn:ngsi-ld:Property:gpsA4567-speed", + "source":{ + "type":"Property", + "value": "GPS" + } + }, + { + "type":"Property", + "value": 10, + "source":{ + "type":"Property", + "value": "CAMERA" + } + }] + } + +2. æ›´æ–°æ“作 (Update Operation) +============================== + +- **datasetId ã«åŸºã¥ã„ã¦å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹å€¤ã‚’æ›´æ–°ã—ã¾ã™** + +ボディ㧠datasetId ã‚’é€ä¿¡ã—ã€**http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId** ã§ +PATCH リクエストを行ã†ã“ã¨ã§ã€ç‰¹å®šã®ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã®å€¤ã‚’æ›´æ–°ã§ãã¾ã™ã€‚ + +.. code-block:: JSON + + { + "value":"27", + "datasetId":"urn:ngsi-ld:Property:speedometerA4567-speed" + } + + +- **属性åã«åŸºã¥ã„ã¦ãƒ‡ãƒ•ォルトã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹å€¤ã‚’æ›´æ–°ã—ã¾ã™** + +ãƒšã‚¤ãƒ­ãƒ¼ãƒ‰ã®æ›´æ–°ã•れãŸå€¤ã®ã¿ã‚’使用 ã—㦠**http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId** ã§ +PATCH リクエストを行ã†ã“ã¨ã«ã‚ˆã‚Šã€ãƒ‡ãƒ•ォルトインスタンスã®å€¤ã‚’æ›´æ–°ã§ãã¾ã™ã€‚ + +.. code-block:: JSON + + { + "value":"27" + } + +3. 削除æ“作 (Delete Operation) +============================== + +- **デフォルトã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’削除ã—ã¾ã™** + +デフォルトã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’削除ã™ã‚‹ã«ã¯ã€URL +**http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId** を使用ã—㦠DELETE リクエストを行ã„ã¾ã™ã€‚ +ã“れã«ã‚ˆã‚Šã€å±žæ€§ã®ãƒ‡ãƒ•ォルトインスタンスãŒå‰Šé™¤ã•れã¾ã™ã€‚ + +- **datasetId ã‚’æŒã¤å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’削除ã—ã¾ã™** + +特定ã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’削除ã™ã‚‹ã«ã¯ã€URL +**http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId?datasetId={{datasetId}}** を使用ã—㦠DELETE +リクエストを行ã„ã¾ã™ã€‚ã“ã“ã§ã€datasetId ã¯å‰Šé™¤ã™ã‚‹å¿…è¦ã®ã‚るインスタンスã®ID ã§ã™ã€‚ + +- **指定ã•れãŸå±žæ€§åã‚’æŒã¤ã™ã¹ã¦ã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’削除ã—ã¾ã™** + +指定ã•れãŸå±žæ€§åã‚’æŒã¤ã™ã¹ã¦ã®å±žæ€§ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’削除ã™ã‚‹å ´åˆã¯ã€URL +**http://:/ngsi-ld/v1/entities/entityId/attrs/attrsId?deleteAll=true** を使用ã—㦠DELETE リクエストを +行ã†å¿…è¦ãŒã‚りã¾ã™ã€‚ + +4. 
クエリæ“作 (Query Operation) +=============================== + +エンティティã®è©³ç´°ã‚’å–å¾—ã™ã‚‹ã«ã¯ã€**http://:/ngsi-ld/v1/entities/** を使用ã—㦠GET リクエストを +行ã†ã¨ã€å¿…è¦ãªå±žæ€§ã®ã™ã¹ã¦ã®ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ãŒå–å¾—ã•れã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/onepageTutorial.rst b/scorpio-broker/docs/ja/source/onepageTutorial.rst new file mode 100644 index 0000000000000000000000000000000000000000..eb434498facec7eb5d2ab714d0ff4560f1e68068 --- /dev/null +++ b/scorpio-broker/docs/ja/source/onepageTutorial.rst @@ -0,0 +1,289 @@ +********** +基本ガイド +********** + +アーキテクãƒãƒ£ã®æ¦‚è¦ +#################### + +Scorpio Broker ã¯ã€NGSI-LD APIs ã®ãƒªãƒ•ァレンス実装ã§ã™ã€‚Scorpio Broker ã¯ã€NGSI-LD API ä»•æ§˜ã«æº–æ‹ ã™ã‚‹ã•ã¾ã–ã¾ãª +データコンテキストæ“作用㮠REST API エンドãƒã‚¤ãƒ³ãƒˆã®å®Ÿè£…ã‚’æä¾›ã—ã¾ã™ã€‚Scorpio Broker コンãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€ãƒ¢ã‚¸ãƒ¥ãƒ¼ãƒ«å¼ã® +マイクロサービス指å‘ã€ã‚¹ã‚±ãƒ¼ãƒ©ãƒ–ルã€è¨­è¨ˆã«ã‚ˆã‚‹å®‰å…¨æ€§ã€ç›£è¦–/デãƒãƒƒã‚°ãŒå®¹æ˜“ã€ãƒ•ォールトトレラント〠+高å¯ç”¨æ€§ã‚¢ãƒ¼ã‚­ãƒ†ã‚¯ãƒãƒ£ã«åŸºã¥ã„ã¦å®Ÿè£…ã•れã¦ã„ã¾ã™ã€‚NGSI-LD ã«åŸºã¥ã ScorpioBroker ã¯ã€ãƒªãƒ³ã‚¯ãƒˆãƒ‡ãƒ¼ã‚¿ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã® +ç‹¬è‡ªã®æ©Ÿèƒ½ã‚’æä¾›ã—ã€å„メッセージ/エンティティã«å«ã¾ã‚Œã‚‹ãƒ‡ãƒ¼ã‚¿ã®è‡ªå·±å®Œçµåž‹ (ã¾ãŸã¯å‚ç…§) ã®å‹•的スキーマ定義 +(ã¤ã¾ã‚Šã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆ) ã‚’æä¾›ã—ã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€Scorpio Broker ã®ã‚³ã‚¢å‡¦ç†ã¯ã€ã•ã¾ã–ã¾ãªã‚¹ã‚­ãƒ¼ãƒžã¨çµåˆã•れ㟠+(ã¾ãŸã¯è¨­è¨ˆã•れãŸ) ã•ã¾ã–ã¾ãªã‚¿ã‚¤ãƒ—ã®ãƒ‡ãƒ¼ã‚¿ã‚½ãƒ¼ã‚¹ã‹ã‚‰ã®å…¥åŠ›ã¨ã—ã¦å‹•çš„ãªã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆé§†å‹•型データをå–å¾—ã—ã¦ã‚‚〠+çµ±åˆã•れãŸã¾ã¾ã«ãªã‚Šã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/architecture.png + +Scorpio Broker ã®åŸºæœ¬ã‚¢ãƒ¼ã‚­ãƒ†ã‚¯ãƒãƒ£ã¯5ã¤ã®ãƒ¬ã‚¤ãƒ¤ãƒ¼ã§æ§‹æˆã•ã‚Œã€æœ€åˆã®ãƒ¬ã‚¤ãƒ¤ãƒ¼ã¯ãƒ—ロデューサーã¨ã‚³ãƒ³ã‚·ãƒ¥ãƒ¼ãƒžãƒ¼ã¨ã—㦠+機能ã™ã‚‹ Scorpio Broker ã‚¯ãƒ©ã‚¤ã‚¢ãƒ³ãƒˆã§æ§‹æˆã•れã¾ã™ã€‚2番目ã®ãƒ¬ã‚¤ãƒ¤ãƒ¼ã¯ã€Scorpio Broker ã¨å¤–部世界ã¨ã®é–“ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイス +ã¨ã—ã¦æ©Ÿèƒ½ã—ã¾ã™ã€‚ã“ã®ãƒ¬ã‚¤ãƒ¤ãƒ¼ã¯ã€NGSI-LD 準拠ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイスã€ã‚µãƒ¼ãƒ“ス API ゲートウェイã€ãŠã‚ˆã³ã‚µãƒ¼ãƒ“ス㮠+ディスカãƒãƒªãƒ¼ã¨ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã§æ§‹æˆã•れã¾ã™ã€‚第3層ã«ã¯ã™ã¹ã¦ã®ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スãŒå«ã¾ã‚Œã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã® CRUD +æ“作ãªã©ã®ã‚¿ã‚¹ã‚¯ã®å¤§éƒ¨åˆ†ã‚’担当ã—ã¾ã™ã€‚第4層ã¯ã€ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã‹ã‚‰ã•ã¾ã–ã¾ãªãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スを接続ã™ã‚‹ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイス +ã¨ã—ã¦æ©Ÿèƒ½ã—ã¾ã™ã€‚5番目ã®ãƒ¬ã‚¤ãƒ¤ãƒ¼ã¯ã€Scorpio Broker ã®ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã¨ã—ã¦æ©Ÿèƒ½ã™ã‚‹ãƒªã‚½ãƒ¼ã‚¹ãƒ¬ã‚¤ãƒ¤ãƒ¼ã§ã™ã€‚ + +**************************** +システム セットアップ ガイド +**************************** + +Scorpio Broker ã®ç’°å¢ƒã‚’セットアップã™ã‚‹ã«ã¯ã€æ¬¡ã®ä¾å­˜é–¢ä¿‚ã‚’æ§‹æˆã™ã‚‹å¿…è¦ãŒã‚りã¾ã™:- + +1. Server JDK. + +2. Apache Kafka. + +3. PostgreSQL + + +Windows +####### + +JDK セットアップ +**************** + +- JDK ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã‚’é–‹å§‹ã—ã€â€œChange destination folder†ãƒã‚§ãƒƒã‚¯ãƒœãƒƒã‚¯ã‚¹ã‚’押ã—ã¦ã‹ã‚‰ã€'Install' をクリックã—ã¾ã™ã€‚ + +**注æ„:-** 推奨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ JDK-11 ã§ã™ã€‚Scorpio Broker ã¯ã€ã“ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã§ã®ã¿é–‹ç™ºãŠã‚ˆã³ãƒ†ã‚¹ãƒˆã•れã¦ã„ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/jdk-1.png + +- インストールディレクトリをã€ãƒ•ォルダåã«ã‚¹ãƒšãƒ¼ã‚¹ã‚’入れãªã„ä»»æ„ã®ãƒ‘スã«å¤‰æ›´ã—ã¾ã™ã€‚ + +Windows ã« Java をインストールã—ãŸå¾Œã€Java インストールディレクトリを指ã™ã‚ˆã†ã« JAVA_HOME 環境変数を設定ã™ã‚‹å¿…è¦ãŒ +ã‚りã¾ã™ã€‚ + +**JAVA_HOME 変数を設定** + +JAVA_HOME 変数を設定ã™ã‚‹ã«ã¯: + +1. Java ãŒã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã•れã¦ã„る場所を調ã¹ã¾ã™ã€‚インストール中ã«ãƒ‘スを変更ã—ãªã‹ã£ãŸå ´åˆã¯ã€æ¬¡ã®ã‚ˆã†ã«ãªã‚Šã¾ã™ã€‚ + +*C:\Program Files\Java\jdk1.version* + +2. + +- Windows 7 ã§ã¯ã€**My Computer** ã‚’å³ã‚¯ãƒªãƒƒã‚¯ã—ã¦ã€**Properties** > **Advanced** ã‚’é¸æŠžã—ã¾ã™ã€‚ + +ã¾ãŸã¯ + +- Windows 8 ã§ã¯ã€**Control Panel** > **System** > **Advanced System Settings** ã«ç§»å‹•ã—ã¾ã™ã€‚ + +3. "Environment Variables" ボタンをクリックã—ã¾ã™ã€‚ + +4. 
"System Variables" ã§ã€New をクリックã—ã¾ã™ã€‚ + +5. "User Variable Name" フィールドã«ã€æ¬¡ã®ã‚ˆã†ã«å…¥åŠ›ã—ã¾ã™: **JAVA_HOME** + +6. "User Variable Value" フィールドã«ã€JDK パスを入力ã—ã¾ã™ã€‚ + +(Java パスã¨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ã€ä½¿ç”¨ã—ã¦ã„ã‚‹ Kafka ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã«ã‚ˆã£ã¦ç•°ãªã‚‹å ´åˆãŒã‚りã¾ã™) + +7. 次ã«ã€'OK' をクリックã—ã¾ã™ã€‚ + +8. é–‹ã„ãŸã°ã‹ã‚Šã® “Environment Variables†ダイアログボックス㮠“System Variable†セクションã§ãƒ‘ス変数を検索ã—ã¾ã™ã€‚ + +9. パスを編集ã—ã€ä¸‹ã®ç”»åƒã®ã‚ˆã†ã«ã€ã™ã§ã«ãã“ã«æ›¸ãè¾¼ã¾ã‚Œã¦ã„ã‚‹ãƒ†ã‚­ã‚¹ãƒˆã®æœ€å¾Œã« *;%JAVA_HOME%\bin* ã¨å…¥åŠ›ã—ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/jdk-3.png + +- Java ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã‚’確èªã™ã‚‹ã«ã¯ã€cmd.exe ã‚’é–‹ã„㦠“java –version†ã¨å…¥åŠ›ã—ã¾ã™ã€‚インストールã—㟠Java ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ãŒ +表示ã•れるã¯ãšã§ã™ã€‚ + +.. figure:: ../../en/source/figures/jdk-4.png + +コマンドプロンプトãŒä¸Šã®ç”»åƒã®ã‚ˆã†ã«ãªã£ã¦ã„ã‚‹å ´åˆã¯ã€å•題ã‚りã¾ã›ã‚“。ãれ以外ã®å ´åˆã¯ã€ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ—ãƒãƒ¼ã‚¸ãƒ§ãƒ³ãŒæ­£ã—ã„ +OS アーキテクãƒãƒ£ (x86, x64) ã¨ä¸€è‡´ã™ã‚‹ã‹ã©ã†ã‹ã€ã¾ãŸã¯ç’°å¢ƒå¤‰æ•°ã®ãƒ‘ã‚¹ãŒæ­£ã—ã„ã‹ã©ã†ã‹ã‚’å†ç¢ºèªã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ + +Kafka ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +******************** + +1. Kafka æ§‹æˆãƒ‡ã‚£ãƒ¬ã‚¯ãƒˆãƒªã«ç§»å‹•ã—ã¾ã™ã€‚例:- **C:\kafka_2.12-2.1.0\config** + +2. ファイル “server.properties†を編集ã—ã¾ã™ã€‚ + +3. 行 "log.dirs=/tmp/kafka-logs†を見ã¤ã‘㦠“log.dir= C:\kafka_2.11-0.9.0.0\kafka-logs" ã«ç·¨é›†ã—ã¾ã™ã€‚ + +4. ZooKeeper ãŒä»–ã®ãƒžã‚·ãƒ³ã¾ãŸã¯ã‚¯ãƒ©ã‚¹ã‚¿ãƒ¼ã§å®Ÿè¡Œã•れã¦ã„ã‚‹å ´åˆã¯ã€â€œzookeeper.connect:2181†をカスタム IP ã¨ãƒãƒ¼ãƒˆã«ç·¨é›†ã§ãã¾ã™ã€‚ã“ã®ãƒ‡ãƒ¢ã§ã¯ã€åŒã˜ãƒžã‚·ãƒ³ã‚’使用ã—ã¦ã„ã‚‹ãŸã‚ã€å¤‰æ›´ã™ã‚‹å¿…è¦ã¯ã‚りã¾ã›ã‚“。ã¾ãŸã€Kafka ãƒãƒ¼ãƒˆã¨ broker.id ã¯ã“ã®ãƒ•ã‚¡ã‚¤ãƒ«ã§æ§‹æˆã§ãã¾ã™ã€‚ä»–ã®è¨­å®šã¯ãã®ã¾ã¾ã«ã—ã¦ãŠãã¾ã™ã€‚ + +5. Kafka ã¯ãƒ‡ãƒ•ォルトãƒãƒ¼ãƒˆ 9092 ã§å®Ÿè¡Œã•れã€ZooKeeper ã®ãƒ‡ãƒ•ォルトãƒãƒ¼ãƒˆ 2181 ã«æŽ¥ç¶šã—ã¾ã™ã€‚ + +**注æ„**: Kafka を実行ã™ã‚‹ã«ã¯ã€Zookeeper を最åˆã«å®Ÿè¡Œã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚Kafka ã‚’é–‰ã˜ã‚‹ã¨ãã¯ã€Kafka よりも Zookeeper +を最åˆã«é–‰ã˜ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚Kafka ã®æŽ¨å¥¨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ kafka_2.12-2.1.0 ã§ã™ã€‚ + +Kafka サーãƒãƒ¼ã®å®Ÿè¡Œ +******************** + +é‡è¦: Kafka サーãƒãƒ¼ã‚’èµ·å‹•ã™ã‚‹å‰ã«ã€ZooKeeper インスタンスãŒç¨¼åƒã—ã¦ã„ã‚‹ã“ã¨ã‚’確èªã—ã¦ãã ã•ã„。 + +1. Kafka インストールディレクトリã«ç§»å‹•ã—ã¾ã™: **C:\kafka_2.11-0.9.0.0\** + +2. Shift キーを押ã—ãªãŒã‚‰å³ã‚¯ãƒªãƒƒã‚¯ã—ã¦ã‚³ãƒžãƒ³ãƒ‰ãƒ—ロンプトを開ãã€â€œOpen command window hereâ€ ã‚ªãƒ—ã‚·ãƒ§ãƒ³ã‚’é¸æŠžã—ã¾ã™ã€‚ + +3. 次ã«ã€**.\bin\windows\kafka-server-start.bat .\config\server.properties** ã¨å…¥åŠ›ã—ã€Enter キーを押ã—ã¾ã™ã€‚ + +4. ãã—ã¦ã€æ–°ã—ã„コマンドウィンドウ㫠**.\bin\windows\kafka-server-start.bat .\config\server.properties** ã¨å…¥åŠ›ã—ã€ã‚¨ãƒ³ã‚¿ãƒ¼ã‚­ãƒ¼ã‚’押ã—ã¾ã™ã€‚ + +PostgreSQL ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +************************* + +ステップ 1) https://www.postgresql.org/download ã«ã‚¢ã‚¯ã‚»ã‚¹ã—ã¾ã™ã€‚ + +**注æ„**: 推奨ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¯ postgres10 ã§ã™ã€‚ + +ステップ 2) 2ã¤ã®ã‚ªãƒ—ションãŒã‚りã¾ã™:- + +1. EnterpriseDB ã«ã‚ˆã‚‹ã‚¤ãƒ³ã‚¿ãƒ©ã‚¯ãƒ†ã‚£ãƒ–インストーラー + +2. BigSQL ã«ã‚ˆã‚‹ã‚°ãƒ©ãƒ•ィカルインストーラー + +BigSQL ã¯ç¾åœ¨ã€éžæŽ¨å¥¨ã® pgAdmin version 3 をインストールã—ã¦ã„ã¾ã™ã€‚最新ãƒãƒ¼ã‚¸ãƒ§ãƒ³4 をインストールã™ã‚‹ EnterpriseDB +ã‚’é¸æŠžã™ã‚‹ã“ã¨ã‚’ãŠå‹§ã‚ã—ã¾ã™ã€‚ + +ステップ 3) + +1. 目的㮠Postgre ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã¨ã‚ªãƒšãƒ¬ãƒ¼ãƒ†ã‚£ãƒ³ã‚°ã‚·ã‚¹ãƒ†ãƒ ãŒè¡¨ç¤ºã•れã¾ã™ã€‚Scorpio Broker ã¯ã“ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã§ãƒ†ã‚¹ãƒˆãŠã‚ˆã³é–‹ç™ºã•れã¦ã„ã‚‹ãŸã‚ã€Postgres10 ã‚’é¸æŠžã—ã¾ã™ã€‚ + +2. "Download" ボタンをクリックã™ã‚‹ã¨ã€ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ãŒå§‹ã¾ã‚Šã¾ã™ã€‚ + +ステップ 4) ダウンロードã—㟠.exe ã‚’é–‹ãã€ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã®ã‚ˆã†ã“ãç”»é¢ (welcome screen) ã§ "Next" ボタンをクリックã—ã¾ã™ã€‚ + +ステップ 5) + +1. å¿…è¦ã«å¿œã˜ã¦ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ãƒ‡ã‚£ãƒ¬ã‚¯ãƒˆãƒªã‚’変更ã—ã¾ã™ã€‚ãれ以外ã®å ´åˆã¯ãƒ‡ãƒ•ォルトã®ã¾ã¾ã«ã—ã¾ã™ã€‚ + +2. 
"Next" ボタンをクリックã—ã¾ã™ã€‚ + +ステップ 6) + +1. システムã«ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã™ã‚‹ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’é¸æŠžã§ãã¾ã™ã€‚Stack Builder ã®ãƒã‚§ãƒƒã‚¯ã‚’外ã—ã¦ã‚‚ã‹ã¾ã„ã¾ã›ã‚“。 + +2. "Next" ボタンをクリックã—ã¾ã™ã€‚ + + +ステップ 7) + +1. データã®å ´æ‰€ã‚’変更ã§ãã¾ã™ã€‚ + +2. "Next" ボタンをクリックã—ã¾ã™ã€‚ + + +ステップ 8) + +1. スーパーユーザーã®ãƒ‘スワードを入力ã—ã¾ã™ã€‚ãれをメモã—ã¦ãã ã•ã„。 + +2. "Next" ボタンをクリックã—ã¾ã™ã€‚ + + +ステップ 9) + +1. ãƒãƒ¼ãƒˆç•ªå·ã¯ãƒ‡ãƒ•ォルトã®ã¾ã¾ã«ã—ã¾ã™ã€‚ + +2. "Next" ボタンをクリックã—ã¾ã™ã€‚ + + +ステップ 10) + +1. インストールå‰ã®æ¦‚è¦ã‚’確èªã—ã¦ãã ã•ã„。 + +2. "Next" ボタンをクリックã—ã¾ã™ã€‚ + +ステップ 11) "Next" ボタンをクリックã—ã¾ã™ã€‚ + +ステップ 12) インストールãŒå®Œäº†ã™ã‚‹ã¨ã€Stack Builder プロンプトãŒè¡¨ç¤ºã•れã¾ã™ã€‚ + +1. ãã®ã‚ªãƒ—ションã®ãƒã‚§ãƒƒã‚¯ã‚’外ã—ã¾ã™ã€‚より高度ãªãƒãƒ¥ãƒ¼ãƒˆãƒªã‚¢ãƒ«ã§ Stack Builder を使用ã—ã¾ã™ã€‚ + +2. "Finish" をクリックã—ã¾ã™ã€‚ + +ステップ 13) PostgreSQL ã‚’èµ·å‹•ã™ã‚‹ã«ã¯ã€"Start" メニューã«ç§»å‹•ã—ã€pgAdmin 4 を検索ã—ã¾ã™ã€‚ + +ステップ 14) pgAdmin ホームページãŒè¡¨ç¤ºã•れã¾ã™ã€‚ + +ステップ 15) å·¦å´ã®ãƒ„リー㧠Servers > Postgre SQL 10 をクリックã—ã¾ã™ + +.. figure:: ../../en/source/figures/dbconfig-1.png + +ステップ 16) + +1. インストール時ã«è¨­å®šã—ãŸã‚¹ãƒ¼ãƒ‘ーユーザーã®ãƒ‘スワードを入力ã—ã¾ã™ã€‚ + +2. "OK" をクリックã—ã¾ã™ã€‚ + +ステップ 17) You will see the Dashboard + +.. figure:: ../../en/source/figures/dbconfig-2.png + +PostgreSQL ã®ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã¯ä»¥ä¸Šã§ã™ã€‚ + +Linux +##### + +JDK ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +****************** + +マシン㫠Java 環境を作æˆã™ã‚‹ã«ã¯ã€JDK をインストールã—ã¾ã™ã€‚ã“れを行ã†ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã‚’é–‹ãã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’実行ã—ã¾ã™:- + +1. sudo apt-get update + +2. sudo apt-get install openjdk-8-jdk + +JDK ãŒãƒžã‚·ãƒ³ã«æ­£ã—ãインストールã•れã¦ã„ã‚‹ã“ã¨ã‚’確èªã™ã‚‹ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã§ã‚³ãƒžãƒ³ãƒ‰ **java -version** を実行ã—ã¦ã€JDK +ã®ãƒãƒ¼ã‚¸ãƒ§ãƒ³ã‚’11ã¨ã—ã¦è¿”ã™å ´åˆã¯ã€æ­£å¸¸ã«æ©Ÿèƒ½ã—ã¦ã„ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/javaTerminal.png + +Kafka ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +******************** + +マシン㫠Apache Kafka をダウンロードã™ã‚‹ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã§æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’1ã¤ãšã¤å®Ÿè¡Œã—ã¾ã™ã€‚ + +1. mkdir kafka + +2. cd kafka + +3. wget https://archive.apache.org/dist/kafka/2.2.0/kafka_2.12-2.2.0.tgz + +4. tar -xzf kafka_2.12-2.2.0.tgz + +Kafka ãŒãƒžã‚·ãƒ³ã«ãƒ€ã‚¦ãƒ³ãƒ­ãƒ¼ãƒ‰ã•れãŸã‚‰ã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’押ã—ã¦å®Ÿè¡Œã—ã¾ã™ã€‚ + +1. kafka_2.12-2.2.0/bin/zookeeper-server-start.sh kafka_2.12-2.2.0/config/zookeeper.properties > /dev/null 2>&1 & + +2. kafka_2.12-2.2.0/bin/kafka-server-start.sh kafka_2.12-2.2.0/config/server.properties > /dev/null 2>&1 & + +PostgreSQL ã®ã‚»ãƒƒãƒˆã‚¢ãƒƒãƒ— +************************* + +マシン㫠PostgreSQL をダウンロードã™ã‚‹ã«ã¯ã€ã‚¿ãƒ¼ãƒŸãƒŠãƒ«ã‹ã‚‰æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’実行ã—ã¾ã™ã€‚ + +1. sudo apt update + +2. sudo apt-get install postgresql-10 + +3. service postgresql status + +最後ã®ã‚³ãƒžãƒ³ãƒ‰ã¯ã€ã“れãŒå›³ã®1ã¤ã¨ä¸€è‡´ã™ã‚‹å ´åˆã€ãƒžã‚·ãƒ³ã® PostgreSQL ã®ã‚¹ãƒ†ãƒ¼ã‚¿ã‚¹ã‚’示ã—ã¾ã™ã€‚ã™ã¹ã¦ãŒæ­£ã—ãインストール +ã•れã¦ã„ã‚‹ã‹ã€ã‚³ãƒžãƒ³ãƒ‰ã‚’å†å®Ÿè¡Œã—ã¾ã™ã€‚ + +.. figure:: ../../en/source/figures/postgresTerminal.png + +PostgreSQL ãŒãƒžã‚·ãƒ³ã«æ­£å¸¸ã«ã‚¤ãƒ³ã‚¹ãƒˆãƒ¼ãƒ«ã•れãŸã‚‰ã€ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ **ngb** を作æˆã—ã€æ¬¡ã®ã‚³ãƒžãƒ³ãƒ‰ã‚’実行ã—ã¦ãã®å½¹å‰²ã‚’変更ã—ã¾ã™: + +1. psql -U postgres -c "create database ngb;" + +2. psql -U postgres -c "create user ngb with password 'ngb';" + +3. psql -U postgres -c "alter database ngb owner to ngb;" + +4. psql -U postgres -c "grant all privileges on database ngb to ngb;" + +5. psql -U postgres -c "alter role ngb superuser;" + +6. sudo apt install postgresql-10-postgis-2.4 + +7. sudo apt install postgresql-10-postgis-scripts + +8. 
sudo -u postgres psql -U postgres -c "create extension postgis;" + +ã“れã§ã€PostgreSQL ã‚’ ScorpioBoker ã§ä½¿ç”¨ã™ã‚‹æº–å‚™ãŒæ•´ã„ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/security.rst b/scorpio-broker/docs/ja/source/security.rst new file mode 100644 index 0000000000000000000000000000000000000000..7fde013047d76340522dc107943c974bd2065a87 --- /dev/null +++ b/scorpio-broker/docs/ja/source/security.rst @@ -0,0 +1,46 @@ +********************** +Scorpio ã®ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ +********************** + +セキュリティアーキテクãƒãƒ£ +########################## + +Scorpio Broker システムã¯ã€ID ãŠã‚ˆã³èªè¨¼ç®¡ç† (Identity & authentication management) ã®ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ã«ã‚‚責任を負ã„ã¾ã™ã€‚ +ã“れã«ã¯ã€Scorpio Broker セキュリティレルムã§ã‚¢ã‚¯ã‚»ã‚¹ã™ã‚‹è¦æ±‚ã€ãƒ¦ãƒ¼ã‚¶ãƒ¼ã€ãƒ­ãƒ¼ãƒ«ãƒ™ãƒ¼ã‚¹ã§ä¿è­·ã•れãŸãƒªã‚½ãƒ¼ã‚¹ã®èªè¨¼ã¨èªå¯ãŒ +å«ã¾ã‚Œã¾ã™ã€‚ + +Scorpio Broker コンãƒãƒ¼ãƒãƒ³ãƒˆãŠã‚ˆã³ã‚µãƒ¼ãƒ“ス全体ã«ã‚¢ãƒ—リケーション層ã®ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ã‚’æä¾›ã™ã‚‹ã€OAuth2.0 ä»•æ§˜ã«æº–æ‹ ã—㟠+æ–°ã—ã„èªè¨¼ãŠã‚ˆã³èªå¯ã‚µãƒ¼ãƒ“ス (Authentication & Authorization service) ãŒå°Žå…¥ã•れã¾ã—ãŸã€‚ + +.. figure:: ../../en/source/figures/security.png + +セキュリティ - 機能リクエストフロー +################################### + +1. ブラウザ/エンドユーザーã¯ã€API gateway REST interface を使用ã—㦠Scorpio Broker システムã«ä¿è­·ã•れã¦ã„るリソースリクエストをé€ä¿¡ã—ã¾ã™ã€‚ + +2. API Gateway ã¯ã€ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£æ©Ÿèƒ½ãŒæœ‰åйã«ãªã£ã¦ã„ã‚‹ã‹ã©ã†ã‹ã‚’確èªã—ã¾ã™ã€‚ + +a. 有効ã«ãªã£ã¦ã„ã‚‹å ´åˆã€ãƒªã‚¯ã‚¨ã‚¹ãƒˆãŒã™ã§ã«èªè¨¼ã•れã¦ãŠã‚Šã€ +既存ã®ã‚»ãƒƒã‚·ãƒ§ãƒ³ãŒã™ã§ã«ã‚ã‚‹ã‹ã©ã†ã‹ã‚’確èªã—ã¾ã™ã€‚ + +- セッションãŒè¦‹ã¤ã‹ã‚‰ãªã„å ´åˆã¯ã€ãƒªã‚¯ã‚¨ã‚¹ãƒˆã‚’èªè¨¼ãŠã‚ˆã³èªå¯ã‚µãƒ¼ãƒ“スã«è»¢é€ã—ã¾ã™ã€‚ã¾ãŸã¯ã€ + +- 既存ã®ã‚»ãƒƒã‚·ãƒ§ãƒ³ãŒè¦‹ã¤ã‹ã£ãŸå ´åˆã€èªè¨¼ç›®çš„ã§åŒã˜ã‚»ãƒƒã‚·ãƒ§ãƒ³ã‚’å†åˆ©ç”¨ã—ã€ãƒªã‚¯ã‚¨ã‚¹ãƒˆã‚’ãƒãƒƒã‚¯ã‚¨ãƒ³ãƒ‰ãƒªã‚½ãƒ¼ã‚¹ã‚µãƒ¼ãƒ“スã«ãƒ«ãƒ¼ãƒ†ã‚£ãƒ³ã‚°ã—ã¾ã™ã€‚ + +b. ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ãŒæœ‰åйã«ãªã£ã¦ã„ãªã„å ´åˆã¯ã€ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ãƒã‚§ãƒƒã‚¯ã‚’ãƒã‚¤ãƒ‘スã—ã€æŒ‡å®šã•れãŸãƒªã‚¯ã‚¨ã‚¹ãƒˆã«å¯¾ã—ã¦ãƒªã‚½ãƒ¼ã‚¹ã‚’ +レンダリングã™ã‚‹è²¬ä»»ãŒã‚ã‚‹ãƒãƒƒã‚¯ã‚¨ãƒ³ãƒ‰ãƒªã‚½ãƒ¼ã‚¹ã‚µãƒ¼ãƒ“スã«ãƒªã‚¯ã‚¨ã‚¹ãƒˆã‚’ルーティングã—ã¾ã™ã€‚ + +3. ã“れã§ã€ãƒªã‚¯ã‚¨ã‚¹ãƒˆãŒ Authentication & Authorization (ç•¥ã—㦠Auth) service ã«å±Šãã¨ã€å…ƒã®ãƒªã‚¯ã‚¨ã‚¹ã‚¿ãƒ¼ã€ã¤ã¾ã‚Šãƒ¦ãƒ¼ã‚¶ãƒ¼/ブラウザーã«ãƒ­ã‚°ã‚¤ãƒ³ãƒ•ォームã§å¿œç­”ã—ã€ãƒªã‚½ãƒ¼ã‚¹ã«ã‚¢ã‚¯ã‚»ã‚¹ã™ã‚‹ãŸã‚ã«ç™ºè¡Œã•れãŸè³‡æ ¼æƒ…å ±ã«åŸºã¥ã„㦠ID ã‚’æç¤ºã—ã¾ã™ã€‚ + +4. ãã®ãŸã‚ã€ãƒ¦ãƒ¼ã‚¶ãƒ¼ã¯èªè¨¼æƒ…報をå«ã‚€ãƒ­ã‚°ã‚¤ãƒ³ãƒ•ォームを Auth service ã«é€ä¿¡ã—ã¾ã™ã€‚Auth service ã¯ã€ã‚¢ã‚«ã‚¦ãƒ³ãƒˆã®è©³ç´°ã«åŸºã¥ã„ã¦ãƒ¦ãƒ¼ã‚¶ãƒ¼ã®è³‡æ ¼æƒ…報を検証ã—ã€æˆåŠŸã—ãŸãƒ­ã‚°ã‚¤ãƒ³èªè¨¼ã‚³ãƒ¼ãƒ‰ (auth code) ã¨ã€ãƒ¦ãƒ¼ã‚¶ãƒ¼ãŒãƒªã‚½ãƒ¼ã‚¹è¦æ±‚をフェッãƒã™ã‚‹ãŸã‚ã«ãƒªãƒ€ã‚¤ãƒ¬ã‚¯ãƒˆã§ãã‚‹ redirect URL ã§å¿œç­”ã™ã‚‹ã‚ˆã†ã«ãªã‚Šã¾ã—ãŸã€‚ +  +5. ユーザー/ブラウザ㯠redirect URL ã«ãƒªãƒ€ã‚¤ãƒ¬ã‚¯ãƒˆã™ã‚‹ã‚ˆã†ã«ãªã‚Šã¾ã—ãŸã€‚ã“ã®å ´åˆã‚‚ã€Auth service ã‹ã‚‰å—ä¿¡ã—㟠auth_code ã‚’å«ã‚€ API gateway URL ã§ã™ã€‚ + +6. API gateway ã¯ã€æ—¢å­˜ã®ã‚»ãƒƒã‚·ãƒ§ãƒ³ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚’見ã¤ã‘るセッションをå†åº¦ãƒã‚§ãƒƒã‚¯ã—ã¾ã™ãŒã€ä»Šå›žã¯ãƒªã‚¯ã‚¨ã‚¹ãƒˆã§ auth_code ã‚’å—ä¿¡ã™ã‚‹ãŸã‚ã€ãã® auth_code を使用ã—ã€ãƒ¦ãƒ¼ã‚¶ãƒ¼ã«ä»£ã‚ã£ã¦ã‚¯ãƒ©ã‚¤ã‚¢ãƒ³ãƒˆã¨ã—ã¦æ©Ÿèƒ½ã™ã‚‹ Auth service ã‹ã‚‰ãƒˆãƒ¼ã‚¯ãƒ³ã‚’リクエストã—ã¾ã™ã€‚èªè¨¼ã‚³ãƒ¼ãƒ‰ (auth code) ã«åŸºã¥ã Auth service ã¯ã€ã™ã§ã«ãƒ­ã‚°ã‚¤ãƒ³ã—ã¦ã„る検証済ã¿ãƒ¦ãƒ¼ã‚¶ãƒ¼ã§ã‚ã‚‹ã“ã¨ã‚’èªè­˜ã—ã€ã‚¢ã‚¯ã‚»ã‚¹ãƒˆãƒ¼ã‚¯ãƒ³ã‚’使用ã—㦠API gateway ã«æˆ»ã‚Šã¾ã™ã€‚ + +7. (åŒã˜ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ セッション コンテキスト内ã§) トークンをå—ä¿¡ã™ã‚‹ã¨ã€API gateway ã¯ã€å…ƒã®è¦æ±‚ã•れãŸãƒªã‚½ãƒ¼ã‚¹/æ“作ã®ãŸã‚ã«ãƒãƒƒã‚¯ã‚¨ãƒ³ãƒ‰ãƒªã‚½ãƒ¼ã‚¹ã‚µãƒ¼ãƒ“スã«ãƒªãƒ¬ãƒ¼/ルーティングã™ã‚‹ã‚ˆã†ã«ãªã‚Šã¾ã—ãŸã€‚ + +8. 
ãƒãƒƒã‚¯ã‚¨ãƒ³ãƒ‰ãƒªã‚½ãƒ¼ã‚¹ã‚µãƒ¼ãƒ“ã‚¹ã‚‚ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£æ©Ÿèƒ½ã§æœ‰åйã«ãªã£ã¦ã„ã¾ã™ (ãã†ã§ãªã„å ´åˆã¯ã€æ¬¡ã®å®‰å…¨ãªè¦æ±‚ã«å¯¾ã—ã¦ã‚¨ãƒ©ãƒ¼ãŒã‚¹ãƒ­ãƒ¼ã•れã¾ã™)。リクエストをå—ä¿¡ã—ã€ãã“ã‹ã‚‰ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚’読ã¿å–りã€Auth service ã¨åŒã˜ã‚ˆã†ã« (抽出ã•ã‚ŒãŸæƒ…å ±ã«åŸºã¥ã„ã¦) 検証ã—ã¦ã€ã“ã‚ŒãŒæŒ‡å®šã•ã‚ŒãŸæ¨©é™ã‚’æŒã¤æœ‰åйãªãƒˆãƒ¼ã‚¯ãƒ³/リクエストã§ã‚ã‚‹ã‹ã©ã†ã‹ã‚’確èªã—ã¾ã™ã€‚Auth service ã®å¿œç­”ãƒãƒƒã‚¯ã¨ãƒãƒƒã‚¯ã‚¨ãƒ³ãƒ‰ã‚µãƒ¼ãƒ“スã¯ã€ãƒ­ãƒ¼ã‚«ãƒ«ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£æ§‹æˆã¨Auth service ベースã®ã‚¢ã‚¯ã‚»ã‚¹è¨±å¯ãŒä¸€è‡´ã—ã¦ã„ã‚‹ã‹ã©ã†ã‹ã‚’判断ã—ã¾ã™ã€‚ + +9. アクセス許å¯/特権ãŒç€ä¿¡è¦æ±‚ã¨ä¸€è‡´ã™ã‚‹å ´åˆã€è¦æ±‚ã•れãŸãƒªã‚½ãƒ¼ã‚¹ã§ãƒ¦ãƒ¼ã‚¶ãƒ¼/ブラウザーã«å¿œç­”ã—ã¾ã™ã€‚å ´åˆã«ã‚ˆã£ã¦ã¯ã€ã‚¨ãƒ©ãƒ¼ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã¨æ‹’å¦ã•れãŸç†ç”±ã§å…ƒã«æˆ»ã‚‹ã‚ˆã‚Šã‚‚ã€ã‚»ã‚­ãƒ¥ãƒªãƒ†ã‚£åŸºæº–ã«ä¸€è‡´ã—ã¾ã›ã‚“。 diff --git a/scorpio-broker/docs/ja/source/systemOverview.rst b/scorpio-broker/docs/ja/source/systemOverview.rst new file mode 100644 index 0000000000000000000000000000000000000000..57c29fc6f7d03798a1029a9f2f9dbf5725335e74 --- /dev/null +++ b/scorpio-broker/docs/ja/source/systemOverview.rst @@ -0,0 +1,77 @@ +************** +アーキテクãƒãƒ£ +************** + +デプロイメントアーキテクãƒãƒ£ã¯ã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã®å¤šãã®æ‡¸å¿µäº‹é … (スケーリングã€ãƒ¢ãƒ‹ã‚¿ãƒªãƒ³ã‚°ã€ãƒ•ォールトトレラント〠+高å¯ç”¨æ€§ã€ã‚»ã‚­ãƒ¥ã‚¢ã€åˆ†é›¢ãªã©) ã«å¯¾å‡¦ã™ã‚‹ Spring Cloud フレームワークã¨ã€åˆ†æ•£åž‹ã§ã‚¹ã‚±ãƒ¼ãƒ©ãƒ–ルãªãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã‚­ãƒ¥ãƒ¼ +インフラストラクãƒãƒ£ã«åŸºã¥ã„ã¦ãŠã‚Šã€IoT ドメインã§é€šå¸¸è¡Œã‚れã¦ã„ã‚‹è†¨å¤§ãªæ•°ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒªã‚¯ã‚¨ã‚¹ãƒˆã®ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸å‡¦ç†ã§ +高ã„パフォーマンスをæä¾›ã™ã‚‹ Kafka を活用ã—ã¾ã™ã€‚ + +ã“れã¯ã€å¤–部ã‹ã‚‰ Scorpio Broker システムã¸ã®é«˜ãƒ¬ãƒ™ãƒ«ãªæ“作 (POST/GET/DELETE/PATCH メソッドを使用ã—㟠HTTP ベース㮠REST) +ã®è¦æ±‚フローをカãƒãƒ¼ã—ã¦ã„ã¾ã™ã€‚外部リクエストã¯ã€Scorpio Broker ã‚·ã‚¹ãƒ†ãƒ ãŒæä¾›ã§ãã‚‹ã™ã¹ã¦ã®ã‚µãƒ¼ãƒ“スã«ä½¿ç”¨ã•れるå˜ä¸€ã® +IP/port ã®çµ„ã¿åˆã‚ã›ã‚’公開ã™ã‚‹çµ±åˆã‚µãƒ¼ãƒ“ス API gateway イインターフェイスを介ã—ã¦æä¾›ã•れã¾ã™ã€‚実際ã«ã¯ã€Scorpio Broker +ã®å„サービスã¯ã€åˆ†æ•£ã‚³ãƒ³ãƒ”ューティング環境ã§ç‹¬ç«‹ã—ãŸã‚¹ã‚¿ãƒ³ãƒ‰ã‚¢ãƒ­ãƒ³ãƒ¦ãƒ‹ãƒƒãƒˆã¨ã—ã¦å±•é–‹ã§ãるマイクロサービスã¨ã—㦠+実装ã•れã¦ã„ã¾ã™ã€‚API gateway ã¯ã€ãƒ‡ã‚£ã‚¹ã‚«ãƒãƒªãƒ¼ãŠã‚ˆã³ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚µãƒ¼ãƒ“ス (Discovery & registration service) +を利用ã—ã¦ã€ã™ã¹ã¦ã®ç€ä¿¡è¦æ±‚を特定ã®ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã«ãƒ«ãƒ¼ãƒ†ã‚£ãƒ³ã‚°ã—ã¾ã™ã€‚ãƒªã‚¯ã‚¨ã‚¹ãƒˆãŒæ“作è¦ä»¶ã«åŸºã¥ã„ã¦ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス +ã«åˆ°é”ã™ã‚‹ã¨ã€ãƒªã‚¢ãƒ«ã‚¿ã‚¤ãƒ ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã¨ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã‚­ãƒ¥ãƒ¼ã‚’介㗠(è¦ä»¶ã«åŸºã¥ã) ã•ã¾ã–ã¾ãªãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス間ã®ç›¸äº’通信㫠+(pub/sub) Kafka トピック (メッセージキュー) を使用ã—ã¾ã™ã€‚ + +.. 
figure:: ../../en/source/figures/architecture.png + +- **Application**: エンドユーザー/ドメインアプリケーションã¯ã€Scorpio Broker を活用ã—ã¦ã€IoT インフラストラクãƒãƒ£ã«é–¢ã™ã‚‹å¿…è¦ãªæƒ…報をæä¾›ã—ã¾ã™ã€‚ã“ã®ã‚¢ãƒ—リケーションã¯ã€è¦ä»¶ã«å¿œã˜ã¦ã€Scorpio Broker ã¨ã®é–“ã§ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆæƒ…報をクエリã€ã‚µãƒ–スクライブã€ã‚¢ãƒƒãƒ—デートã§ãã¾ã™ã€‚ + +- **Consumers**: ã“れらã¯ã€Scorpio Broker ã®ãƒ‡ãƒ¼ã‚¿ã‚’消費ã™ã‚‹ IoT エンティティã¾ãŸã¯ã‚¢ãƒ—リケーションã§ã™ã€‚ + +- **Producers**: ã“れらã¯ã€Scorpio Broker ã«ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ‡ãƒ¼ã‚¿ã‚’生æˆã™ã‚‹ IoT エンティティã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ã€ã¾ãŸã¯ アプリケーションã§ã™ã€‚ + +- **Service API Gateway**: ã“れã¯ã€REST ベース㮠HTTP インターフェースを介ã—ã¦å…¬é–‹ã•れ㟠Scorpio Broker システムã®å†…部サービスã«ã‚¢ã‚¯ã‚»ã‚¹ã™ã‚‹ãŸã‚ã®å¤–部世界㮠Proxy gateway ã§ã™ã€‚Scorpio Broker ã«é–¢é€£ã™ã‚‹ã™ã¹ã¦ã®å†…部サービスã«ã¯ã€ã“ã® Service API gateway を介ã—ã¦ã€å˜ä¸€ã® (通常ã¯é™çš„ãª) IP ã¨ãƒãƒ¼ãƒˆã‚’使用ã—ã€URL ã®ã‚µãƒ¼ãƒ“スåã‚’æ‹¡å¼µã—ã¦ã‚¢ã‚¯ã‚»ã‚¹ã§ãã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€ãƒ¦ãƒ¼ã‚¶ãƒ¼ã¯ã€å‹•çš„ã«å¤‰æ›´ã•れるã“ã¨ãŒå¤šã„ã™ã¹ã¦ã®ã‚µãƒ¼ãƒ“ス㮠IP ã¨ãƒãƒ¼ãƒˆã‚’ç®¡ç† (ã¾ãŸã¯å­¦ç¿’ã¾ãŸã¯ä½¿ç”¨) ã™ã‚‹å¿…è¦ã¯ã‚りã¾ã›ã‚“。ã“れã«ã‚ˆã‚Šã€ç‰¹ã«è¤‡æ•°ã®ã‚µãƒ¼ãƒ“ス (ã¾ãŸã¯ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス) ãŒ1ã¤ã®ã‚·ã‚¹ãƒ†ãƒ ã§å®Ÿè¡Œã•れã¦ã„ã‚‹å ´åˆã«ã€ä½œæ¥­ãŒæ¥½ã«ãªã‚Šã¾ã™ã€‚ã“れã¯ã€ã™ã¹ã¦ã®ãƒãƒƒã‚¯ã‚¨ãƒ³ãƒ‰ã‚µãƒ¼ãƒ“ス㫠Proxy gateway (ã¤ã¾ã‚Šã€Service API Gateway) を使用ã™ã‚‹ã“ã¨ã§ç°¡å˜ã«è§£æ±ºã§ãã¾ã™ã€‚ + +- **Rest Interface**: ã“れらã¯ã€Scorpio Broker ã§ç‰¹å®šã®æ“作を実行ã™ã‚‹ãŸã‚ã«æ¶ˆè²»ã™ã‚‹å¤–部エンティティ/アプリケーション㮠HTTP ベースã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェースã§ã™ã€‚外部インターフェース㯠Service API gateway を介ã—ã¦è¡¨ç¤ºã•れã€è¦æ±‚ã•れãŸå„サービスã¸ã®å†…部インターフェースã®ãƒžãƒƒãƒ”ングã¯ã€Service Discovery & Registration モジュールを介ã—ã¦æ¤œå‡ºã•れã¾ã™ã€‚ + +- **Service Discovery & Registration**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’使用ã™ã‚‹ã¨ã€ä»»æ„ã®ã‚µãƒ¼ãƒ“ス (Webサービス/マイクロサービス) をレジストレーションã§ãã‚‹ãŸã‚ã€ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã®ãƒ‡ã‚£ã‚¹ã‚«ãƒãƒªãƒ¼æ©Ÿèƒ½ã‚’使用ã™ã‚‹ã‚¯ãƒ©ã‚¤ã‚¢ãƒ³ãƒˆã¯ã€è¦æ±‚ã®é€ä¿¡å…ˆã¨ãªã‚‹ã‚µãƒ¼ãƒ“スインスタンスã®å ´æ‰€ã‚’特定ã§ãã¾ã™ã€‚ã¤ã¾ã‚Šã€Service Discovery & Registration ã¯ã€ã‚µãƒ¼ãƒ“スã€ãã®ã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã€ãŠã‚ˆã³ãã®å ´æ‰€ã®ãƒ‡ãƒ¼ã‚¿ãƒ™ãƒ¼ã‚¹ã‚’実装ã—ã¾ã™ã€‚サービスインスタンスã¯ã€èµ·å‹•時ã«ã‚µãƒ¼ãƒ“スレジストリã«ç™»éŒ²ã•れã€ã‚·ãƒ£ãƒƒãƒˆãƒ€ã‚¦ãƒ³æ™‚ã«ç™»éŒ²ã¯è§£é™¤ã•れã¾ã™ã€‚サービスã®ã‚¯ãƒ©ã‚¤ã‚¢ãƒ³ãƒˆã¯ã€ã‚µãƒ¼ãƒ“スã®åˆ©ç”¨å¯èƒ½ãªã‚¤ãƒ³ã‚¹ã‚¿ãƒ³ã‚¹ã‚’検出ã™ã‚‹ã‚µãƒ¼ãƒ“スレジストリã«ã‚¯ã‚¨ãƒªã‚’実行ã—ã¾ã™ã€‚サービスレジストリã¯ã€ã‚µãƒ¼ãƒ“スインスタンス㮠Health check API を呼ã³å‡ºã—ã¦ã€ãƒªã‚¯ã‚¨ã‚¹ãƒˆã‚’処ç†ã§ãã‚‹ã“ã¨ã‚’確èªã™ã‚‹å ´åˆã‚‚ã‚りã¾ã™ã€‚ + +- **Entity Manager**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€Scorpio Broker ã®ä»–ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã‚’使用ã—ã¦ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ã«é–¢é€£ã™ã‚‹ã™ã¹ã¦ã® CRUD æ“作を処ç†ã—ã¾ã™ã€‚ + +- **LD Context Resolver**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€Scorpio Broker ã®ä»–ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã«ã‚ˆã‚‹ã•らãªã‚‹å‡¦ç†ã®ãŸã‚ã«ã€JSON-LD @context ã«åŸºã¥ã„㦠NGSI-LD ドキュメントを拡張ã™ã‚‹å½¹å‰²ã‚’æžœãŸã—ã¾ã™ã€‚ + +- **Subscription & Notification Manager**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£ãŠã‚ˆã³/ã¾ãŸã¯ csource subscription & notification ã«é–¢é€£ã™ã‚‹ CRUD æ“作ã®å‡¦ç†ã‚’担当ã—ã¾ã™ã€‚ + +- **Query Manager**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€Scorpio Broker ã¸ã®å˜ç´”ã¾ãŸã¯è¤‡é›‘ãªã‚¯ã‚¨ãƒª (ジオクエリãªã©) を処ç†ã—ã¾ã™ã€‚ + +- **Storage Manager**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ãƒ–ローカーã‹ã‚‰ãƒ‡ãƒ¼ã‚¿ã‚’フェッãƒã—ã€ãれらを関連ã™ã‚‹ã‚¹ã‚­ãƒ¼ãƒžå½¢å¼ã«å¤‰æ›ã—ã¦ã€DB テーブルã«ä¿æŒã™ã‚‹å½¹å‰²ã‚’æžœãŸã—ã¾ã™ã€‚ã•らã«ã€ã“ã®ãƒžãƒãƒ¼ã‚¸ãƒ£ãƒ¼ã¯ã€DB ã¸ã®è¤‡é›‘ãªã‚¯ã‚¨ãƒª (ジオクエリやクロスドメイン エンティティ コンテキスト リレーションシップ クエリãªã©) ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイスもæä¾›ã—ã¾ã™ã€‚ + +- **Context Registry Manager**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€csource registration/query/subscription ã® CRUD æ“作用ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェイスをæä¾›ã™ã‚‹å½¹å‰²ã‚’æžœãŸã—ã¾ã™ã€‚ + +- **Health Check & 
Monitoring**: ã“ã®ã‚³ãƒ³ãƒãƒ¼ãƒãƒ³ãƒˆã¯ã€å®Ÿè¡Œä¸­ã®ã‚µãƒ¼ãƒ“スã¨ã‚¤ãƒ³ãƒ•ラストラクãƒãƒ£ã®çŠ¶æ…‹ã‚’ç›£è¦–ã™ã‚‹å½¹å‰²ã‚’æžœãŸã—ã¾ã™ã€‚ + +- **Message Bus Handler**: Scorpio Broker ã®ã™ã¹ã¦ã®ãƒ¢ã‚¸ãƒ¥ãƒ¼ãƒ«ã¯ã€ãƒ¢ã‚¸ãƒ¥ãƒ¼ãƒ«é–“ã§ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã‚’交æ›ã™ã‚‹ãŸã‚ã«ãƒã‚¹ã¨é€šä¿¡ã™ã‚‹å¿…è¦ãŒã‚ã‚‹å ´åˆãŒã‚りã¾ã™ã€‚ã“ã®ã‚¤ãƒ³ã‚¿ãƒ¼ãƒ•ェースã¯ã€ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ãƒã‚¹ãƒãƒ³ãƒ‰ãƒ©ãƒ¼ã«ã‚ˆã£ã¦æä¾›ã•れã¾ã™ã€‚ + +- **Storage Connectors**: Scorpio Broker ã¯ã€ç‰¹å®šã®æƒ…報をã•ã¾ã–ã¾ãª DB å½¢å¼ã§ä¿å­˜ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã‚³ãƒã‚¯ã‚¿ (ä»»æ„ã®ã‚¿ã‚¤ãƒ—ã®ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ãƒ–ローカー方法論を使用) ã¯ã€ãれらã®ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã‚·ã‚¹ãƒ†ãƒ  (ローカルã¾ãŸã¯ãƒªãƒ¢ãƒ¼ãƒˆã«å­˜åœ¨ã™ã‚‹å¯èƒ½æ€§ãŒã‚りã¾ã™) ã«æŽ¥ç¶šã™ã‚‹æ–¹æ³•ã‚’æä¾›ã—ã¾ã™ã€‚ãŸã¨ãˆã°ã€ã‚¨ãƒ³ãƒ†ã‚£ãƒ†ã‚£æƒ…å ±ã¯ã€MySQLã€PostgreSQLã€Bigdata ãªã©ã®ã•ã¾ã–ã¾ãªã‚¿ã‚¤ãƒ—ã®ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã‚·ã‚¹ãƒ†ãƒ ã«ä¿å­˜/ストリーミングã§ãã¾ã™ã€‚ã“れらã®ã‚³ãƒã‚¯ã‚¿ã¯ã€ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã®å¾©å…ƒåŠ›ã®ç›®çš„ã§å®Ÿè£…ã™ã‚‹ã“ã¨ã‚‚ã§ãã¾ã™ã€‚ + +- **Context Registry Connector**: Scorpio Broker ã¯ã€ç™»éŒ²ã•れãŸã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹ (ブローカー/プロãƒã‚¤ãƒ€ãƒ¼) ã¨ãれらãŒã‚µãƒãƒ¼ãƒˆã™ã‚‹ãƒ‡ãƒ¼ã‚¿ãƒ¢ãƒ‡ãƒ«ã®ã‚¿ã‚¤ãƒ—ã«ã¤ã„ã¦çŸ¥ã‚‹ãŸã‚ã«ã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ¬ã‚¸ã‚¹ãƒˆãƒªã¨é€šä¿¡ã™ã‚‹å¿…è¦ãŒã‚りã¾ã™ã€‚コンテキストレジストリコãƒã‚¯ã‚¿ã‚’使用ã™ã‚‹ã¨ã€ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ãƒ–ローカメカニズムã¯ã€ãƒ•ェデレーションモードã§ãƒ­ãƒ¼ã‚«ãƒ«ã¾ãŸã¯ãƒªãƒ¢ãƒ¼ãƒˆã§å®Ÿè¡Œã•れã¦ã„ã‚‹å¯èƒ½æ€§ã®ã‚ã‚‹ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒ¬ã‚¸ã‚¹ãƒˆãƒªã«æŽ¥ç¶šã§ãã¾ã™ã€‚ + +- **Storage**:ã“れã¯ã€ãƒ‡ãƒ¼ã‚¿ãŒæ°¸ç¶šåŒ–ã•れる実際ã®ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ (Postgres/Postgis ãªã©) ã§ã™ã€‚ + +- **Context Registry**: ã“れã¯ã€ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆã‚½ãƒ¼ã‚¹/プロデューサーã®ãƒ¬ã‚¸ã‚¹ãƒˆãƒ¬ãƒ¼ã‚·ãƒ§ãƒ³ã‚’ä¿å­˜ã™ã‚‹è²¬ä»»ãŒã‚るコンãƒãƒ¼ãƒãƒ³ãƒˆã§ã™ã€‚ + +***************************** +デプロイメントアーキテクãƒãƒ£ +***************************** + +ã“ã®ã‚»ã‚¯ã‚·ãƒ§ãƒ³ã§ã¯ã€ã•ã¾ã–ã¾ãªãƒ†ã‚¯ãƒŽãƒ­ã‚¸ãƒ¼ã‚¹ã‚¿ãƒƒã‚¯ã‚’使用ã—ã¦ã„ã‚‹ Scorpio Broker ã®ãƒ‡ãƒ—ロイメントアーキテクãƒãƒ£ã«ã¤ã„㦠+説明ã—ã¾ã™ã€‚ + +.. 
figure:: ../../en/source/figures/deploymentarchitecture.png + +デプロイメントアーキテクãƒãƒ£ã¯ã€ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã®å¤šãã®æ‡¸å¿µäº‹é … (スケーリングã€ãƒ¢ãƒ‹ã‚¿ãƒªãƒ³ã‚°ã€ãƒ•ォールトトレラント〠+高å¯ç”¨æ€§ã€ã‚»ã‚­ãƒ¥ã‚¢ã€åˆ†é›¢ãªã©) ã«å¯¾å‡¦ã™ã‚‹ Spring Cloud フレームワークã¨ã€åˆ†æ•£åž‹ã§ã‚¹ã‚±ãƒ¼ãƒ©ãƒ–ルãªãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã‚­ãƒ¥ãƒ¼ +インフラストラクãƒãƒ£ã«åŸºã¥ã„ã¦ãŠã‚Šã€IoT ドメインã§é€šå¸¸è¡Œã‚れã¦ã„ã‚‹è†¨å¤§ãªæ•°ã®ã‚³ãƒ³ãƒ†ã‚­ã‚¹ãƒˆãƒªã‚¯ã‚¨ã‚¹ãƒˆã®ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸å‡¦ç†ã§ +高ã„パフォーマンスをæä¾›ã™ã‚‹ Kafka を活用ã—ã¾ã™ã€‚デプロイメントアーキテクãƒãƒ£ã¯ã€å¤–部ã‹ã‚‰ Scorpio Broker システムã¸ã® +é«˜ãƒ¬ãƒ™ãƒ«ãªæ“作 (POST/GET/DELETE/PATCH メソッドを使用ã—㟠HTTP ベース㮠REST) ã®è¦æ±‚フローをカãƒãƒ¼ã—ã¾ã™ã€‚ +外部リクエストã¯ã€Scorpio Broker ã‚·ã‚¹ãƒ†ãƒ ãŒæä¾›ã§ãã‚‹ã™ã¹ã¦ã®ã‚µãƒ¼ãƒ“スã«ä½¿ç”¨ã•れるå˜ä¸€ã® IP/port ã®çµ„ã¿åˆã‚ã›ã‚’公開ã™ã‚‹ +Unified service API gateway interface を介ã—ã¦æä¾›ã•れã¾ã™ã€‚実際ã«ã¯ã€Scorpio Broker ã®å„サービスã¯ã€ +分散コンピューティング環境ã§ç‹¬ç«‹ã—ãŸã‚¹ã‚¿ãƒ³ãƒ‰ã‚¢ãƒ­ãƒ³ãƒ¦ãƒ‹ãƒƒãƒˆã¨ã—ã¦å±•é–‹ã§ãるマイクロサービスã¨ã—ã¦å®Ÿè£…ã•れã¾ã™ã€‚ãã® +API gateway ã¯ã€Discovery & registration service を利用ã—ã¦ã€ã™ã¹ã¦ã®ç€ä¿¡è¦æ±‚を特定ã®ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã«ãƒ«ãƒ¼ãƒ†ã‚£ãƒ³ã‚° +ã—ã¾ã™ã€‚ãƒªã‚¯ã‚¨ã‚¹ãƒˆãŒæ“作è¦ä»¶ã«åŸºã¥ã„ã¦ãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“スã«åˆ°é”ã™ã‚‹ã¨ã€ãƒªã‚¢ãƒ«ã‚¿ã‚¤ãƒ ã‚¹ãƒˆãƒ¬ãƒ¼ã‚¸ã¨ãƒ¡ãƒƒã‚»ãƒ¼ã‚¸ã‚­ãƒ¥ãƒ¼ã‚’介ã—㟠+(è¦ä»¶ã«åŸºã¥ã) ã•ã¾ã–ã¾ãªãƒžã‚¤ã‚¯ãƒ­ã‚µãƒ¼ãƒ“ス間ã®ç›¸äº’通信 (è¦ä»¶ã«åŸºã¥ã) ã« (pub / sub) Kafkaトピック (メッセージキュー) +を使用ã—ã¾ã™ã€‚ diff --git a/scorpio-broker/docs/ja/source/testing.rst b/scorpio-broker/docs/ja/source/testing.rst new file mode 100644 index 0000000000000000000000000000000000000000..8b9fca92c26b0133c7d6f5b3f72f07a864d720cd --- /dev/null +++ b/scorpio-broker/docs/ja/source/testing.rst @@ -0,0 +1,35 @@ +Tests +===== + +Scorpio ã«ã¯2セットã®ãƒ†ã‚¹ãƒˆãŒã‚りã¾ã™ã€‚ユニットテストã«ã¯ JUnit を使用ã—ã€ã‚·ã‚¹ãƒ†ãƒ ãƒ†ã‚¹ãƒˆã«ã¯ npm test ベース㮠FIWARE +NGSI-LD Testsuite を使用ã—ã¾ã™ã€‚ + +ユニットテストã®å®Ÿè¡Œ +-------------------- + +Scorpio 内ã®ãƒ­ã‚¸ãƒƒã‚¯ã®å¤šãã¯ã€Kafka ã¨çµ¡ã¿åˆã£ã¦ã„ã¾ã™ã€‚ã—ãŸãŒã£ã¦ã€å¤šãã®å˜ä½“テストã§ã¯ã€å®Ÿè¡Œä¸­ã® Kafka インスタンス㌠+å¿…è¦ã§ã™ã€‚インストールã®ç« ã®èª¬æ˜Žã«å¾“ã£ã¦ã€Kafka サーãƒãƒ¼ã¨ zookeeper ã‚’èµ·å‹•ã—ã¾ã™ã€‚実行ã™ã‚‹ã“ã¨ã«ã‚ˆã‚Šã€goal test +を使用ã—㦠Maven を介ã—ã¦æ˜Žç¤ºçš„ã«ãƒ†ã‚¹ãƒˆã‚’実行ã§ãã¾ã™ã€‚ + +.. 
code:: console

    mvn test

Unless you add -DskipTests to the Maven command, the tests are also executed for the goals package, install, verify and deploy. You can run all tests by executing the Maven command from the root directory, or run individual tests by executing the Maven command in the corresponding directory.

FIWARE NGSI-LD Testsuite
------------------------

To run the test suite you need a running instance of Scorpio, either started as described in the chapter on starting the components or via the Docker containers. The test suite, together with a complete explanation of how to set it up and start it, is available here. A condensed version follows. Install npm on your system. Download the test suite from `here `__. Extract the test suite and run npm install to install all dependencies in the test suite folder. You need to set four environment variables. TEST\_ENDPOINT is the broker, so for Scorpio this should default to http://localhost:9090. WEB\_APP\_PORT is the port of the test suite; it has to match the port used in the variables below, e.g. 4444. ACC\_ENDPOINT is the endpoint of the test suite, e.g. http://localhost:4444. NOTIFY\_ENDPOINT is the endpoint used for notifications in the tests; it has to end with /acc. Start Scorpio. Start the accumulator/notification endpoint by running ``node accumulator/accumulator.js &``. Start the tests with ``npm test``.
diff --git a/scorpio-broker/docs/ja/source/troubleshooting.rst b/scorpio-broker/docs/ja/source/troubleshooting.rst new file mode 100644 index 0000000000000000000000000000000000000000..15f95033e183934ad0e07e243eda4b865b0cbfe0 --- /dev/null +++ b/scorpio-broker/docs/ja/source/troubleshooting.rst @@ -0,0 +1,38 @@
***************
Troubleshooting
***************

Missing JAXB dependencies
=========================

When starting the eureka-server you may face the exception **java.lang.TypeNotPresentException: Type javax.xml.bind.JAXBContext not present**. In that case you are most likely running Java 11 on your machine. Since Java 9, the `javax.xml.bind` packages have been marked as deprecated, and they were removed completely in Java 11.

To fix this issue and get the eureka-server running, you have to manually add the following JAXB Maven dependencies to `ScorpioBroker/SpringCloudModules/eureka/pom.xml` before starting it:

.. code:: xml

    ...
    <dependencies>
        ...
        <dependency>
            <groupId>com.sun.xml.bind</groupId>
            <artifactId>jaxb-core</artifactId>
            <version>2.3.0.1</version>
        </dependency>
        <dependency>
            <groupId>javax.xml.bind</groupId>
            <artifactId>jaxb-api</artifactId>
            <version>2.3.1</version>
        </dependency>
        <dependency>
            <groupId>com.sun.xml.bind</groupId>
            <artifactId>jaxb-impl</artifactId>
            <version>2.3.1</version>
        </dependency>
        ...
    </dependencies>
    ...
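As a quick sanity check after editing the pom.xml, you can ask Maven whether the JAXB artifacts are now resolved for the eureka module before launching it again. The snippet below is only a sketch: it assumes you run it from the repository root and that the root build aggregates the `SpringCloudModules/eureka` module (adjust the `-pl` path otherwise).

.. code:: console

    # show the resolved JAXB artifacts for the eureka module (assumed module path)
    mvn -pl SpringCloudModules/eureka dependency:tree -Dincludes=javax.xml.bind,com.sun.xml.bind

    # rebuild only the eureka module with the new dependencies
    mvn -pl SpringCloudModules/eureka clean package -DskipTests

If both JAXB group ids show up in the dependency tree, the ``TypeNotPresentException`` should no longer occur on startup.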
diff --git a/scorpio-broker/docs/roadmap.ja.md b/scorpio-broker/docs/roadmap.ja.md
new file mode 100644
index 0000000000000000000000000000000000000000..4d620d173ec766fbca81626c9f9614177e6ccf67
--- /dev/null
+++ b/scorpio-broker/docs/roadmap.ja.md
@@ -0,0 +1,63 @@
+# Scorpio Broker Roadmap
+
+Scorpio Broker: This product is an Incubated FIWARE Generic Enabler. If you would like to learn about the
+overall roadmap of FIWARE, please check the "Roadmap" section of the FIWARE Catalogue. The Scorpio Broker
+implements the NGSI-LD interface as specified by the ETSI Industry Specification Group on Context
+Information Management.
+
+## Introduction
+
+This section elaborates on proposed new features or tasks which are expected to be added to the product in
+the foreseeable future. There should be no assumption of a commitment to deliver these features on specific
+dates or in the order given. The development team will do its best to follow the proposed dates and
+priorities, but please bear in mind that plans to work on a given feature or task may be revised. All
+information is provided as general guidelines only, and this section may be revised at any time to provide
+newer information.
+
+Disclaimer:
+
+ 1. This section was last updated in September 2020. Please take into account that its content could be obsolete.
+ 2. Note that this software is developed in an agile way, so the development plan is continuously under review.
+    Thus, this roadmap has to be understood as a rough plan of features to be implemented over time, and it is
+    fully valid only at the time of writing it. This roadmap is not to be understood as a commitment on features
+    and/or dates.
+ 3. Some of the roadmap items may be implemented by external community developers, out of the scope of the
+    FIWARE Generic Enabler owners. Thus, the moment in which these features will be finalized cannot be assured.
+
+### Short term
+
+The following list of features is planned to be addressed in the short term and is expected to be incorporated
+in the next release of the product, planned for the end of the year or the first half of 2021, in accordance
+with the next FIWARE release:
+
+1. Issue tracking & general bug fixing: we will continue to remove existing bugs and address the current issues
+    - Active tracking of bugs with minimal response time and a clearly defined structure for handling issue tickets
+2. Support for @context changes in NGSI-LD v1.3.1
+3. Query language syntax changes for attribute paths (v1.3.1)
+4. Implement the POST query
+5. Queries for the available entity types and attributes
+6. Support for entity counts in queries
+
+### Medium term
+
+The following list of features is planned to be addressed in the medium term, typically within the subsequent
+release(s) generated within 6 months after the next planned release.
+
+1. NGSI-LD Testsuite
+    - Extend the testsuite with temporal queries
+2. Initial multi-tenancy support on the database level
+3. Full GeoJSON documents as query responses and notifications
+4. Support for queries where the attribute is implicitly specified in the query filter or geoquery
+5. WebSocket binding (initially for the subscribe/notify interaction)
+
+### Long term
+
+The following list of features contains proposals regarding the longer-term evolution of the product, even
+though the development of these features has not yet been scheduled for a release in the near future. Please
+feel free to contact us if you wish to get involved in the implementation or influence the roadmap:
+
+1. Access control on the entity and/or attribute level and complete multi-tenancy support
+    - Support for multiple tenants, which should include addressing certain security aspects such as
+      access-right management. This will need some discussion on the level of detail of access rights, etc.
+2. @context cache requestor
+    - Provide a kind of proxy for entity operations which stores the @context
+3. Grouping of attributes (successor of the "Attribute Domains" from NGSIv1)
+4. NGSI-LD Scope (an extended FIWARE-Service-Path feature)
+5. Support for aggregation operators in the temporal query language
+6. Full MQTT support
+    - Looking further into MQTT 5 to potentially support a full NGSI-LD MQTT binding
diff --git a/scorpio-broker/docs/roadmap.md b/scorpio-broker/docs/roadmap.md
new file mode 100644
index 0000000000000000000000000000000000000000..8a41ce02f98aad617efe9dce660165f9540226c6
--- /dev/null
+++ b/scorpio-broker/docs/roadmap.md
@@ -0,0 +1,41 @@
+# Scorpio Broker Roadmap
+Scorpio Broker: This product is an Incubated FIWARE Generic Enabler. If you would like to learn about the overall Roadmap of FIWARE, please check section "Roadmap" on the FIWARE Catalogue. The Scorpio Broker implements the NGSI-LD interface as specified by the ETSI Industry Specification Group on Context Information Management.
+
+## Introduction
+This section elaborates on proposed new features or tasks which are expected to be added to the product in the foreseeable future. There should be no assumption of a commitment to deliver these features on specific dates or in the order given. The development team will be doing their best to follow the proposed dates and priorities, but please bear in mind that plans to work on a given feature or task may be revised. All information is provided as general guidelines only, and this section may be revised to provide newer information at any time.
+
+Disclaimer:
+ 1. This section has been last updated in February 2021. Please take into account that its content could be obsolete.
+ 2. Note we develop this software in an agile way, so the development plan is continuously under review. Thus, this roadmap has to be understood as a rough plan of features to be done over time, and it is fully valid only at the time of writing it. This roadmap is not to be understood as a commitment on features and/or dates.
+ 3. Some of the roadmap items may be implemented by external community developers, out of the scope of the GE owners. Thus, the moment in which these features will be finalized cannot be assured.
+
+### Short Term
+
+The following list of features is planned to be addressed in the short term and incorporated in the next release of the product, planned for the end of the year or the first half of 2021, in accordance with the next FIWARE release:
+1. Issue tracking & general bugfixing: We will continue to remove existing bugs and address the current issues
+    - Active tracking of the bugs with minimal response time and a well-defined structure to handle the issue tickets
+2. Coordinated move towards NGSI-LD v1.3.1, as this partially breaks data structures and API calls. This merge will include
+    - Support for @context changes in NGSI-LD v1.3.1
+    - Query language syntax changes to the attribute path (v1.3.1)
+3. Implement the POST query
+4. Work on Kubernetes support
+    - Scaling support for individual microservices with Kubernetes
+    - Integration of the microservices with the Kubernetes service registry and gateway
+
+### Medium Term
+The following list of features is planned to be addressed in the medium term, typically within the subsequent release(s) generated in the next 6 months after the next planned release.
+1. Initial multi-tenancy support on the database level
+2. Support for queries where the attribute is implicitly specified in the query filter or geoquery
+3. WebSocket binding (initially for the subscribe/notify interaction)
+
+### Long Term
+The following list of features contains proposals regarding the longer-term evolution of the product, even though the development of these features has not yet been scheduled for a release in the near future. Please feel free to contact us if you wish to get involved in the implementation or influence the roadmap:
+1. Access control on entity and/or attribute level and complete multi-tenancy support
+    - Supporting multiple tenants, which should include addressing certain security aspects like access-right management. This will need some discussion on the detail level of access rights, etc.
+2. @context cache requestor
+    - Providing a kind of a proxy for entity operations which stores the @context
+3. Grouping of Attributes (successor of the "Attribute Domains" from NGSIv1)
+4. NGSI-LD Scope (extended FIWARE-Service-Path feature)
+5. Support of aggregation operators in the temporal query language
+6. Full MQTT Support
+    - Looking further into MQTT 5 to potentially support a full NGSI-LD MQTT binding.
diff --git a/scorpio-broker/http_server/app.js b/scorpio-broker/http_server/app.js
new file mode 100644
index 0000000000000000000000000000000000000000..56a239f8dee4ad61d4b9b1d41b1e5d98d5c83049
--- /dev/null
+++ b/scorpio-broker/http_server/app.js
@@ -0,0 +1,16 @@
+const express = require('express');
+const bodyParser = require('body-parser');
+const temporalEntitiesRoutes = require('./routes/TemporalRoutes');
+const entitiesRoutes = require('./routes/EntitiesRoutes');
+
+const app = express();
+const PORT = 9010;
+
+app.use(bodyParser.json());
+
+app.use('/ngsi-ld/v1/temporal/entities', temporalEntitiesRoutes);
+app.use('/ngsi-ld/v1/entities', entitiesRoutes);
+
+app.listen(PORT, () => {
+  console.log(`Server is running on port ${PORT}`);
+});
diff --git a/scorpio-broker/http_server/controllers/EntitiesController.js b/scorpio-broker/http_server/controllers/EntitiesController.js
new file mode 100644
index 0000000000000000000000000000000000000000..761286927fbf9ca42aebc0b2e4f8bf5f361d0a38
--- /dev/null
+++ b/scorpio-broker/http_server/controllers/EntitiesController.js
@@ -0,0 +1,13 @@
+class EntitiesController {
+  async getAll(req, res) {
+    try {
+      res.status(200).json({ message: 'Fetched all entities.' });
+    } catch (error) {
+      res.status(500).json({ error: 'An error occurred while fetching entities.'
}); + } + } + +} + +module.exports = new EntitiesController(); + diff --git a/scorpio-broker/http_server/controllers/TemporalEntitiesController.js b/scorpio-broker/http_server/controllers/TemporalEntitiesController.js new file mode 100644 index 0000000000000000000000000000000000000000..802cc4b79c034751f16dc406caa4c4b8c392c465 --- /dev/null +++ b/scorpio-broker/http_server/controllers/TemporalEntitiesController.js @@ -0,0 +1,38 @@ +class TemporalEntitiesController { + async getAll(req, res) { + try { + console.log({query:req.query}) + const response = [ + { + id: "urn:ngsi-ld:WeatherObserved:1555", + type: "WeatherObserved", + refDevice: { + type: "Relationship", + object: "urn:ngsi-ld:Device:TBD", + instanceId: "urn:ngsi-ld:3f71a3f3-3922-4523-918e-c835b95b86b7" + }, + location: { + type: "GeoProperty", + value: { + type: "Point", + coordinates: [-4.754444444, 41.640833333] + }, + instanceId: "urn:ngsi-ld:d65b3bd0-062a-4ba4-b48d-f7f91dcbebb9" + }, + "@context": [ + "https://raw.githubusercontent.com/smart-data-models/dataModel.Weather/master/context.jsonld", + "https://uri.etsi.org/ngsi-ld/v1/ngsi-ld-core-context.jsonld" + ] + } + ]; + console.log({"hi":response}) + res.status(200).json(response); + } catch (error) { + res.status(500).json({ error: 'An error occurred while fetching temporal entities.' }); + } + } + +} + +module.exports = new TemporalEntitiesController(); + diff --git a/scorpio-broker/http_server/node_modules/.bin/mime b/scorpio-broker/http_server/node_modules/.bin/mime new file mode 120000 index 0000000000000000000000000000000000000000..fbb7ee0eed8d1dd0fe3b5a9d6ff41d1c4f044675 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/.bin/mime @@ -0,0 +1 @@ +../mime/cli.js \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/.package-lock.json b/scorpio-broker/http_server/node_modules/.package-lock.json new file mode 100644 index 0000000000000000000000000000000000000000..11e8682972710e13224ec6b59c71b4ed92dcee75 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/.package-lock.json @@ -0,0 +1,756 @@ +{ + "name": "http_server", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "node_modules/accepts": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", + "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/body-parser": { + "version": "1.20.3", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", + "integrity": "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g==", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + 
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + }, + "node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + 
}, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.7.tgz", + "integrity": "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA==", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "get-proto": "^1.0.0", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/http-errors": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", + "integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/negotiator": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", + "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/object-inspect": { + "version": "1.13.3", + "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.3.tgz", + "integrity": "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", + "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==" + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", + "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/send": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", + "integrity": 
"sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", + "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + }, + "node_modules/side-channel": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz", + "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz", + "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz", + "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz", + "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": 
"^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "engines": { + "node": ">= 0.8" + } + } + } +} diff --git a/scorpio-broker/http_server/node_modules/accepts/HISTORY.md b/scorpio-broker/http_server/node_modules/accepts/HISTORY.md new file mode 100644 index 0000000000000000000000000000000000000000..cb5990c7c3620f4936a3ac42b3bf335c95eef7e8 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/accepts/HISTORY.md @@ -0,0 +1,243 @@ +1.3.8 / 2022-02-02 +================== + + * deps: mime-types@~2.1.34 + - deps: mime-db@~1.51.0 + * deps: negotiator@0.6.3 + +1.3.7 / 2019-04-29 +================== + + * deps: negotiator@0.6.2 + - Fix sorting charset, encoding, and language with extra parameters + +1.3.6 / 2019-04-28 +================== + + * deps: mime-types@~2.1.24 + - deps: mime-db@~1.40.0 + +1.3.5 / 2018-02-28 +================== + + * deps: mime-types@~2.1.18 + - deps: mime-db@~1.33.0 + +1.3.4 / 2017-08-22 +================== + + * deps: mime-types@~2.1.16 + - deps: mime-db@~1.29.0 + +1.3.3 / 2016-05-02 +================== + + * deps: mime-types@~2.1.11 + - deps: mime-db@~1.23.0 + * deps: negotiator@0.6.1 + - perf: improve `Accept` parsing speed + - perf: improve `Accept-Charset` parsing speed + - perf: improve `Accept-Encoding` parsing speed + - perf: improve `Accept-Language` parsing speed + +1.3.2 / 2016-03-08 +================== + + * deps: mime-types@~2.1.10 + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + - deps: mime-db@~1.22.0 + +1.3.1 / 2016-01-19 +================== + + * deps: mime-types@~2.1.9 + - deps: mime-db@~1.21.0 + +1.3.0 / 2015-09-29 +================== + + * deps: mime-types@~2.1.7 + - deps: 
mime-db@~1.19.0 + * deps: negotiator@0.6.0 + - Fix including type extensions in parameters in `Accept` parsing + - Fix parsing `Accept` parameters with quoted equals + - Fix parsing `Accept` parameters with quoted semicolons + - Lazy-load modules from main entry point + - perf: delay type concatenation until needed + - perf: enable strict mode + - perf: hoist regular expressions + - perf: remove closures getting spec properties + - perf: remove a closure from media type parsing + - perf: remove property delete from media type parsing + +1.2.13 / 2015-09-06 +=================== + + * deps: mime-types@~2.1.6 + - deps: mime-db@~1.18.0 + +1.2.12 / 2015-07-30 +=================== + + * deps: mime-types@~2.1.4 + - deps: mime-db@~1.16.0 + +1.2.11 / 2015-07-16 +=================== + + * deps: mime-types@~2.1.3 + - deps: mime-db@~1.15.0 + +1.2.10 / 2015-07-01 +=================== + + * deps: mime-types@~2.1.2 + - deps: mime-db@~1.14.0 + +1.2.9 / 2015-06-08 +================== + + * deps: mime-types@~2.1.1 + - perf: fix deopt during mapping + +1.2.8 / 2015-06-07 +================== + + * deps: mime-types@~2.1.0 + - deps: mime-db@~1.13.0 + * perf: avoid argument reassignment & argument slice + * perf: avoid negotiator recursive construction + * perf: enable strict mode + * perf: remove unnecessary bitwise operator + +1.2.7 / 2015-05-10 +================== + + * deps: negotiator@0.5.3 + - Fix media type parameter matching to be case-insensitive + +1.2.6 / 2015-05-07 +================== + + * deps: mime-types@~2.0.11 + - deps: mime-db@~1.9.1 + * deps: negotiator@0.5.2 + - Fix comparing media types with quoted values + - Fix splitting media types with quoted commas + +1.2.5 / 2015-03-13 +================== + + * deps: mime-types@~2.0.10 + - deps: mime-db@~1.8.0 + +1.2.4 / 2015-02-14 +================== + + * Support Node.js 0.6 + * deps: mime-types@~2.0.9 + - deps: mime-db@~1.7.0 + * deps: negotiator@0.5.1 + - Fix preference sorting to be stable for long acceptable lists + +1.2.3 / 2015-01-31 +================== + + * deps: mime-types@~2.0.8 + - deps: mime-db@~1.6.0 + +1.2.2 / 2014-12-30 +================== + + * deps: mime-types@~2.0.7 + - deps: mime-db@~1.5.0 + +1.2.1 / 2014-12-30 +================== + + * deps: mime-types@~2.0.5 + - deps: mime-db@~1.3.1 + +1.2.0 / 2014-12-19 +================== + + * deps: negotiator@0.5.0 + - Fix list return order when large accepted list + - Fix missing identity encoding when q=0 exists + - Remove dynamic building of Negotiator class + +1.1.4 / 2014-12-10 +================== + + * deps: mime-types@~2.0.4 + - deps: mime-db@~1.3.0 + +1.1.3 / 2014-11-09 +================== + + * deps: mime-types@~2.0.3 + - deps: mime-db@~1.2.0 + +1.1.2 / 2014-10-14 +================== + + * deps: negotiator@0.4.9 + - Fix error when media type has invalid parameter + +1.1.1 / 2014-09-28 +================== + + * deps: mime-types@~2.0.2 + - deps: mime-db@~1.1.0 + * deps: negotiator@0.4.8 + - Fix all negotiations to be case-insensitive + - Stable sort preferences of same quality according to client order + +1.1.0 / 2014-09-02 +================== + + * update `mime-types` + +1.0.7 / 2014-07-04 +================== + + * Fix wrong type returned from `type` when match after unknown extension + +1.0.6 / 2014-06-24 +================== + + * deps: negotiator@0.4.7 + +1.0.5 / 2014-06-20 +================== + + * fix crash when unknown extension given + +1.0.4 / 2014-06-19 +================== + + * use `mime-types` + +1.0.3 / 2014-06-11 +================== + + * deps: negotiator@0.4.6 + - Order 
by specificity when quality is the same + +1.0.2 / 2014-05-29 +================== + + * Fix interpretation when header not in request + * deps: pin negotiator@0.4.5 + +1.0.1 / 2014-01-18 +================== + + * Identity encoding isn't always acceptable + * deps: negotiator@~0.4.0 + +1.0.0 / 2013-12-27 +================== + + * Genesis diff --git a/scorpio-broker/http_server/node_modules/accepts/LICENSE b/scorpio-broker/http_server/node_modules/accepts/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..06166077be4d1f620d89b9eb33c76d89e75857da --- /dev/null +++ b/scorpio-broker/http_server/node_modules/accepts/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/scorpio-broker/http_server/node_modules/accepts/README.md b/scorpio-broker/http_server/node_modules/accepts/README.md new file mode 100644 index 0000000000000000000000000000000000000000..82680c530c3886540f630f643990e2ec707319d1 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/accepts/README.md @@ -0,0 +1,140 @@ +# accepts + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][github-actions-ci-image]][github-actions-ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Higher level content negotiation based on [negotiator](https://www.npmjs.com/package/negotiator). +Extracted from [koa](https://www.npmjs.com/package/koa) for general use. + +In addition to negotiator, it allows: + +- Allows types as an array or arguments list, ie `(['text/html', 'application/json'])` + as well as `('text/html', 'application/json')`. +- Allows type shorthands such as `json`. +- Returns `false` when no types match +- Treats non-existent headers as `*` + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install accepts +``` + +## API + +```js +var accepts = require('accepts') +``` + +### accepts(req) + +Create a new `Accepts` object for the given `req`. + +#### .charset(charsets) + +Return the first accepted charset. If nothing in `charsets` is accepted, +then `false` is returned. 
+ +#### .charsets() + +Return the charsets that the request accepts, in the order of the client's +preference (most preferred first). + +#### .encoding(encodings) + +Return the first accepted encoding. If nothing in `encodings` is accepted, +then `false` is returned. + +#### .encodings() + +Return the encodings that the request accepts, in the order of the client's +preference (most preferred first). + +#### .language(languages) + +Return the first accepted language. If nothing in `languages` is accepted, +then `false` is returned. + +#### .languages() + +Return the languages that the request accepts, in the order of the client's +preference (most preferred first). + +#### .type(types) + +Return the first accepted type (and it is returned as the same text as what +appears in the `types` array). If nothing in `types` is accepted, then `false` +is returned. + +The `types` array can contain full MIME types or file extensions. Any value +that is not a full MIME types is passed to `require('mime-types').lookup`. + +#### .types() + +Return the types that the request accepts, in the order of the client's +preference (most preferred first). + +## Examples + +### Simple type negotiation + +This simple example shows how to use `accepts` to return a different typed +respond body based on what the client wants to accept. The server lists it's +preferences in order and will get back the best match between the client and +server. + +```js +var accepts = require('accepts') +var http = require('http') + +function app (req, res) { + var accept = accepts(req) + + // the order of this list is significant; should be server preferred order + switch (accept.type(['json', 'html'])) { + case 'json': + res.setHeader('Content-Type', 'application/json') + res.write('{"hello":"world!"}') + break + case 'html': + res.setHeader('Content-Type', 'text/html') + res.write('hello, world!') + break + default: + // the fallback is text/plain, so no need to specify it above + res.setHeader('Content-Type', 'text/plain') + res.write('hello, world!') + break + } + + res.end() +} + +http.createServer(app).listen(3000) +``` + +You can test this out with the cURL program: +```sh +curl -I -H'Accept: text/html' http://localhost:3000/ +``` + +## License + +[MIT](LICENSE) + +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/accepts/master +[coveralls-url]: https://coveralls.io/r/jshttp/accepts?branch=master +[github-actions-ci-image]: https://badgen.net/github/checks/jshttp/accepts/master?label=ci +[github-actions-ci-url]: https://github.com/jshttp/accepts/actions/workflows/ci.yml +[node-version-image]: https://badgen.net/npm/node/accepts +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/accepts +[npm-url]: https://npmjs.org/package/accepts +[npm-version-image]: https://badgen.net/npm/v/accepts diff --git a/scorpio-broker/http_server/node_modules/accepts/index.js b/scorpio-broker/http_server/node_modules/accepts/index.js new file mode 100644 index 0000000000000000000000000000000000000000..e9b2f63fb16f8ecdeb16c8eced302612794ccf65 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/accepts/index.js @@ -0,0 +1,238 @@ +/*! + * accepts + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var Negotiator = require('negotiator') +var mime = require('mime-types') + +/** + * Module exports. 
+ * @public + */ + +module.exports = Accepts + +/** + * Create a new Accepts object for the given req. + * + * @param {object} req + * @public + */ + +function Accepts (req) { + if (!(this instanceof Accepts)) { + return new Accepts(req) + } + + this.headers = req.headers + this.negotiator = new Negotiator(req) +} + +/** + * Check if the given `type(s)` is acceptable, returning + * the best match when true, otherwise `undefined`, in which + * case you should respond with 406 "Not Acceptable". + * + * The `type` value may be a single mime type string + * such as "application/json", the extension name + * such as "json" or an array `["json", "html", "text/plain"]`. When a list + * or array is given the _best_ match, if any is returned. + * + * Examples: + * + * // Accept: text/html + * this.types('html'); + * // => "html" + * + * // Accept: text/*, application/json + * this.types('html'); + * // => "html" + * this.types('text/html'); + * // => "text/html" + * this.types('json', 'text'); + * // => "json" + * this.types('application/json'); + * // => "application/json" + * + * // Accept: text/*, application/json + * this.types('image/png'); + * this.types('png'); + * // => undefined + * + * // Accept: text/*;q=.5, application/json + * this.types(['html', 'json']); + * this.types('html', 'json'); + * // => "json" + * + * @param {String|Array} types... + * @return {String|Array|Boolean} + * @public + */ + +Accepts.prototype.type = +Accepts.prototype.types = function (types_) { + var types = types_ + + // support flattened arguments + if (types && !Array.isArray(types)) { + types = new Array(arguments.length) + for (var i = 0; i < types.length; i++) { + types[i] = arguments[i] + } + } + + // no types, return all requested types + if (!types || types.length === 0) { + return this.negotiator.mediaTypes() + } + + // no accept header, return first given type + if (!this.headers.accept) { + return types[0] + } + + var mimes = types.map(extToMime) + var accepts = this.negotiator.mediaTypes(mimes.filter(validMime)) + var first = accepts[0] + + return first + ? types[mimes.indexOf(first)] + : false +} + +/** + * Return accepted encodings or best fit based on `encodings`. + * + * Given `Accept-Encoding: gzip, deflate` + * an array sorted by quality is returned: + * + * ['gzip', 'deflate'] + * + * @param {String|Array} encodings... + * @return {String|Array} + * @public + */ + +Accepts.prototype.encoding = +Accepts.prototype.encodings = function (encodings_) { + var encodings = encodings_ + + // support flattened arguments + if (encodings && !Array.isArray(encodings)) { + encodings = new Array(arguments.length) + for (var i = 0; i < encodings.length; i++) { + encodings[i] = arguments[i] + } + } + + // no encodings, return all requested encodings + if (!encodings || encodings.length === 0) { + return this.negotiator.encodings() + } + + return this.negotiator.encodings(encodings)[0] || false +} + +/** + * Return accepted charsets or best fit based on `charsets`. + * + * Given `Accept-Charset: utf-8, iso-8859-1;q=0.2, utf-7;q=0.5` + * an array sorted by quality is returned: + * + * ['utf-8', 'utf-7', 'iso-8859-1'] + * + * @param {String|Array} charsets... 
+ * @return {String|Array} + * @public + */ + +Accepts.prototype.charset = +Accepts.prototype.charsets = function (charsets_) { + var charsets = charsets_ + + // support flattened arguments + if (charsets && !Array.isArray(charsets)) { + charsets = new Array(arguments.length) + for (var i = 0; i < charsets.length; i++) { + charsets[i] = arguments[i] + } + } + + // no charsets, return all requested charsets + if (!charsets || charsets.length === 0) { + return this.negotiator.charsets() + } + + return this.negotiator.charsets(charsets)[0] || false +} + +/** + * Return accepted languages or best fit based on `langs`. + * + * Given `Accept-Language: en;q=0.8, es, pt` + * an array sorted by quality is returned: + * + * ['es', 'pt', 'en'] + * + * @param {String|Array} langs... + * @return {Array|String} + * @public + */ + +Accepts.prototype.lang = +Accepts.prototype.langs = +Accepts.prototype.language = +Accepts.prototype.languages = function (languages_) { + var languages = languages_ + + // support flattened arguments + if (languages && !Array.isArray(languages)) { + languages = new Array(arguments.length) + for (var i = 0; i < languages.length; i++) { + languages[i] = arguments[i] + } + } + + // no languages, return all requested languages + if (!languages || languages.length === 0) { + return this.negotiator.languages() + } + + return this.negotiator.languages(languages)[0] || false +} + +/** + * Convert extnames to mime. + * + * @param {String} type + * @return {String} + * @private + */ + +function extToMime (type) { + return type.indexOf('/') === -1 + ? mime.lookup(type) + : type +} + +/** + * Check if mime is valid. + * + * @param {String} type + * @return {String} + * @private + */ + +function validMime (type) { + return typeof type === 'string' +} diff --git a/scorpio-broker/http_server/node_modules/accepts/package.json b/scorpio-broker/http_server/node_modules/accepts/package.json new file mode 100644 index 0000000000000000000000000000000000000000..0f2d15da92b29d328f4da484f494c5442c711b4d --- /dev/null +++ b/scorpio-broker/http_server/node_modules/accepts/package.json @@ -0,0 +1,47 @@ +{ + "name": "accepts", + "description": "Higher-level content negotiation", + "version": "1.3.8", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "jshttp/accepts", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "4.3.1", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + }, + "keywords": [ + "content", + "negotiation", + "accept", + "accepts" + ] +} diff --git a/scorpio-broker/http_server/node_modules/array-flatten/LICENSE b/scorpio-broker/http_server/node_modules/array-flatten/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..983fbe8aec3f4e2d4add592bb1083b00d7366f66 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/array-flatten/LICENSE @@ -0,0 +1,21 @@ +The MIT License 
(MIT) + +Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/scorpio-broker/http_server/node_modules/array-flatten/README.md b/scorpio-broker/http_server/node_modules/array-flatten/README.md new file mode 100644 index 0000000000000000000000000000000000000000..91fa5b637ec2d2a492d6b5c4bf9ba2e76ff2f352 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/array-flatten/README.md @@ -0,0 +1,43 @@ +# Array Flatten + +[![NPM version][npm-image]][npm-url] +[![NPM downloads][downloads-image]][downloads-url] +[![Build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] + +> Flatten an array of nested arrays into a single flat array. Accepts an optional depth. + +## Installation + +``` +npm install array-flatten --save +``` + +## Usage + +```javascript +var flatten = require('array-flatten') + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9]) +//=> [1, 2, 3, 4, 5, 6, 7, 8, 9] + +flatten([1, [2, [3, [4, [5], 6], 7], 8], 9], 2) +//=> [1, 2, 3, [4, [5], 6], 7, 8, 9] + +(function () { + flatten(arguments) //=> [1, 2, 3] +})(1, [2, 3]) +``` + +## License + +MIT + +[npm-image]: https://img.shields.io/npm/v/array-flatten.svg?style=flat +[npm-url]: https://npmjs.org/package/array-flatten +[downloads-image]: https://img.shields.io/npm/dm/array-flatten.svg?style=flat +[downloads-url]: https://npmjs.org/package/array-flatten +[travis-image]: https://img.shields.io/travis/blakeembrey/array-flatten.svg?style=flat +[travis-url]: https://travis-ci.org/blakeembrey/array-flatten +[coveralls-image]: https://img.shields.io/coveralls/blakeembrey/array-flatten.svg?style=flat +[coveralls-url]: https://coveralls.io/r/blakeembrey/array-flatten?branch=master diff --git a/scorpio-broker/http_server/node_modules/array-flatten/array-flatten.js b/scorpio-broker/http_server/node_modules/array-flatten/array-flatten.js new file mode 100644 index 0000000000000000000000000000000000000000..089117b322f5857b8bb6bccf7a659686aca067c0 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/array-flatten/array-flatten.js @@ -0,0 +1,64 @@ +'use strict' + +/** + * Expose `arrayFlatten`. + */ +module.exports = arrayFlatten + +/** + * Recursive flatten function with depth. 
+ * + * @param {Array} array + * @param {Array} result + * @param {Number} depth + * @return {Array} + */ +function flattenWithDepth (array, result, depth) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (depth > 0 && Array.isArray(value)) { + flattenWithDepth(value, result, depth - 1) + } else { + result.push(value) + } + } + + return result +} + +/** + * Recursive flatten function. Omitting depth is slightly faster. + * + * @param {Array} array + * @param {Array} result + * @return {Array} + */ +function flattenForever (array, result) { + for (var i = 0; i < array.length; i++) { + var value = array[i] + + if (Array.isArray(value)) { + flattenForever(value, result) + } else { + result.push(value) + } + } + + return result +} + +/** + * Flatten an array, with the ability to define a depth. + * + * @param {Array} array + * @param {Number} depth + * @return {Array} + */ +function arrayFlatten (array, depth) { + if (depth == null) { + return flattenForever(array, []) + } + + return flattenWithDepth(array, [], depth) +} diff --git a/scorpio-broker/http_server/node_modules/array-flatten/package.json b/scorpio-broker/http_server/node_modules/array-flatten/package.json new file mode 100644 index 0000000000000000000000000000000000000000..1a24e2a1a1d3fbd694b77bf6673ab1e1c2fd5043 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/array-flatten/package.json @@ -0,0 +1,39 @@ +{ + "name": "array-flatten", + "version": "1.1.1", + "description": "Flatten an array of nested arrays into a single flat array", + "main": "array-flatten.js", + "files": [ + "array-flatten.js", + "LICENSE" + ], + "scripts": { + "test": "istanbul cover _mocha -- -R spec" + }, + "repository": { + "type": "git", + "url": "git://github.com/blakeembrey/array-flatten.git" + }, + "keywords": [ + "array", + "flatten", + "arguments", + "depth" + ], + "author": { + "name": "Blake Embrey", + "email": "hello@blakeembrey.com", + "url": "http://blakeembrey.me" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/blakeembrey/array-flatten/issues" + }, + "homepage": "https://github.com/blakeembrey/array-flatten", + "devDependencies": { + "istanbul": "^0.3.13", + "mocha": "^2.2.4", + "pre-commit": "^1.0.7", + "standard": "^3.7.3" + } +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/HISTORY.md b/scorpio-broker/http_server/node_modules/body-parser/HISTORY.md new file mode 100644 index 0000000000000000000000000000000000000000..81d23e064d55249173e4dab35f1153a3a34ab644 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/HISTORY.md @@ -0,0 +1,672 @@ +1.20.3 / 2024-09-10 +=================== + + * deps: qs@6.13.0 + * add `depth` option to customize the depth level in the parser + * IMPORTANT: The default `depth` level for parsing URL-encoded data is now `32` (previously was `Infinity`) + +1.20.2 / 2023-02-21 +=================== + + * Fix strict json error message on Node.js 19+ + * deps: content-type@~1.0.5 + - perf: skip value escaping when unnecessary + * deps: raw-body@2.5.2 + +1.20.1 / 2022-10-06 +=================== + + * deps: qs@6.11.0 + * perf: remove unnecessary object clone + +1.20.0 / 2022-04-02 +=================== + + * Fix error message for json parse whitespace in `strict` + * Fix internal error when inflated body exceeds limit + * Prevent loss of async hooks context + * Prevent hanging when request already read + * deps: depd@2.0.0 + - Replace internal `eval` usage with `Function` constructor + - Use instance methods on `process` to check for 
listeners + * deps: http-errors@2.0.0 + - deps: depd@2.0.0 + - deps: statuses@2.0.1 + * deps: on-finished@2.4.1 + * deps: qs@6.10.3 + * deps: raw-body@2.5.1 + - deps: http-errors@2.0.0 + +1.19.2 / 2022-02-15 +=================== + + * deps: bytes@3.1.2 + * deps: qs@6.9.7 + * Fix handling of `__proto__` keys + * deps: raw-body@2.4.3 + - deps: bytes@3.1.2 + +1.19.1 / 2021-12-10 +=================== + + * deps: bytes@3.1.1 + * deps: http-errors@1.8.1 + - deps: inherits@2.0.4 + - deps: toidentifier@1.0.1 + - deps: setprototypeof@1.2.0 + * deps: qs@6.9.6 + * deps: raw-body@2.4.2 + - deps: bytes@3.1.1 + - deps: http-errors@1.8.1 + * deps: safe-buffer@5.2.1 + * deps: type-is@~1.6.18 + +1.19.0 / 2019-04-25 +=================== + + * deps: bytes@3.1.0 + - Add petabyte (`pb`) support + * deps: http-errors@1.7.2 + - Set constructor name when possible + - deps: setprototypeof@1.1.1 + - deps: statuses@'>= 1.5.0 < 2' + * deps: iconv-lite@0.4.24 + - Added encoding MIK + * deps: qs@6.7.0 + - Fix parsing array brackets after index + * deps: raw-body@2.4.0 + - deps: bytes@3.1.0 + - deps: http-errors@1.7.2 + - deps: iconv-lite@0.4.24 + * deps: type-is@~1.6.17 + - deps: mime-types@~2.1.24 + - perf: prevent internal `throw` on invalid type + +1.18.3 / 2018-05-14 +=================== + + * Fix stack trace for strict json parse error + * deps: depd@~1.1.2 + - perf: remove argument reassignment + * deps: http-errors@~1.6.3 + - deps: depd@~1.1.2 + - deps: setprototypeof@1.1.0 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.23 + - Fix loading encoding with year appended + - Fix deprecation warnings on Node.js 10+ + * deps: qs@6.5.2 + * deps: raw-body@2.3.3 + - deps: http-errors@1.6.3 + - deps: iconv-lite@0.4.23 + * deps: type-is@~1.6.16 + - deps: mime-types@~2.1.18 + +1.18.2 / 2017-09-22 +=================== + + * deps: debug@2.6.9 + * perf: remove argument reassignment + +1.18.1 / 2017-09-12 +=================== + + * deps: content-type@~1.0.4 + - perf: remove argument reassignment + - perf: skip parameter parsing when no parameters + * deps: iconv-lite@0.4.19 + - Fix ISO-8859-1 regression + - Update Windows-1255 + * deps: qs@6.5.1 + - Fix parsing & compacting very deep objects + * deps: raw-body@2.3.2 + - deps: iconv-lite@0.4.19 + +1.18.0 / 2017-09-08 +=================== + + * Fix JSON strict violation error to match native parse error + * Include the `body` property on verify errors + * Include the `type` property on all generated errors + * Use `http-errors` to set status code on errors + * deps: bytes@3.0.0 + * deps: debug@2.6.8 + * deps: depd@~1.1.1 + - Remove unnecessary `Buffer` loading + * deps: http-errors@~1.6.2 + - deps: depd@1.1.1 + * deps: iconv-lite@0.4.18 + - Add support for React Native + - Add a warning if not loaded as utf-8 + - Fix CESU-8 decoding in Node.js 8 + - Improve speed of ISO-8859-1 encoding + * deps: qs@6.5.0 + * deps: raw-body@2.3.1 + - Use `http-errors` for standard emitted errors + - deps: bytes@3.0.0 + - deps: iconv-lite@0.4.18 + - perf: skip buffer decoding on overage chunk + * perf: prevent internal `throw` when missing charset + +1.17.2 / 2017-05-17 +=================== + + * deps: debug@2.6.7 + - Fix `DEBUG_MAX_ARRAY_LENGTH` + - deps: ms@2.0.0 + * deps: type-is@~1.6.15 + - deps: mime-types@~2.1.15 + +1.17.1 / 2017-03-06 +=================== + + * deps: qs@6.4.0 + - Fix regression parsing keys starting with `[` + +1.17.0 / 2017-03-01 +=================== + + * deps: http-errors@~1.6.1 + - Make `message` property enumerable for `HttpError`s + - deps: 
setprototypeof@1.0.3 + * deps: qs@6.3.1 + - Fix compacting nested arrays + +1.16.1 / 2017-02-10 +=================== + + * deps: debug@2.6.1 + - Fix deprecation messages in WebStorm and other editors + - Undeprecate `DEBUG_FD` set to `1` or `2` + +1.16.0 / 2017-01-17 +=================== + + * deps: debug@2.6.0 + - Allow colors in workers + - Deprecated `DEBUG_FD` environment variable + - Fix error when running under React Native + - Use same color for same namespace + - deps: ms@0.7.2 + * deps: http-errors@~1.5.1 + - deps: inherits@2.0.3 + - deps: setprototypeof@1.0.2 + - deps: statuses@'>= 1.3.1 < 2' + * deps: iconv-lite@0.4.15 + - Added encoding MS-31J + - Added encoding MS-932 + - Added encoding MS-936 + - Added encoding MS-949 + - Added encoding MS-950 + - Fix GBK/GB18030 handling of Euro character + * deps: qs@6.2.1 + - Fix array parsing from skipping empty values + * deps: raw-body@~2.2.0 + - deps: iconv-lite@0.4.15 + * deps: type-is@~1.6.14 + - deps: mime-types@~2.1.13 + +1.15.2 / 2016-06-19 +=================== + + * deps: bytes@2.4.0 + * deps: content-type@~1.0.2 + - perf: enable strict mode + * deps: http-errors@~1.5.0 + - Use `setprototypeof` module to replace `__proto__` setting + - deps: statuses@'>= 1.3.0 < 2' + - perf: enable strict mode + * deps: qs@6.2.0 + * deps: raw-body@~2.1.7 + - deps: bytes@2.4.0 + - perf: remove double-cleanup on happy path + * deps: type-is@~1.6.13 + - deps: mime-types@~2.1.11 + +1.15.1 / 2016-05-05 +=================== + + * deps: bytes@2.3.0 + - Drop partial bytes on all parsed units + - Fix parsing byte string that looks like hex + * deps: raw-body@~2.1.6 + - deps: bytes@2.3.0 + * deps: type-is@~1.6.12 + - deps: mime-types@~2.1.10 + +1.15.0 / 2016-02-10 +=================== + + * deps: http-errors@~1.4.0 + - Add `HttpError` export, for `err instanceof createError.HttpError` + - deps: inherits@2.0.1 + - deps: statuses@'>= 1.2.1 < 2' + * deps: qs@6.1.0 + * deps: type-is@~1.6.11 + - deps: mime-types@~2.1.9 + +1.14.2 / 2015-12-16 +=================== + + * deps: bytes@2.2.0 + * deps: iconv-lite@0.4.13 + * deps: qs@5.2.0 + * deps: raw-body@~2.1.5 + - deps: bytes@2.2.0 + - deps: iconv-lite@0.4.13 + * deps: type-is@~1.6.10 + - deps: mime-types@~2.1.8 + +1.14.1 / 2015-09-27 +=================== + + * Fix issue where invalid charset results in 400 when `verify` used + * deps: iconv-lite@0.4.12 + - Fix CESU-8 decoding in Node.js 4.x + * deps: raw-body@~2.1.4 + - Fix masking critical errors from `iconv-lite` + - deps: iconv-lite@0.4.12 + * deps: type-is@~1.6.9 + - deps: mime-types@~2.1.7 + +1.14.0 / 2015-09-16 +=================== + + * Fix JSON strict parse error to match syntax errors + * Provide static `require` analysis in `urlencoded` parser + * deps: depd@~1.1.0 + - Support web browser loading + * deps: qs@5.1.0 + * deps: raw-body@~2.1.3 + - Fix sync callback when attaching data listener causes sync read + * deps: type-is@~1.6.8 + - Fix type error when given invalid type to match against + - deps: mime-types@~2.1.6 + +1.13.3 / 2015-07-31 +=================== + + * deps: type-is@~1.6.6 + - deps: mime-types@~2.1.4 + +1.13.2 / 2015-07-05 +=================== + + * deps: iconv-lite@0.4.11 + * deps: qs@4.0.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix user-visible incompatibilities from 3.1.0 + - Fix various parsing edge cases + * deps: raw-body@~2.1.2 + - Fix error stack traces to skip `makeError` + - deps: iconv-lite@0.4.11 + * deps: type-is@~1.6.4 + - deps: mime-types@~2.1.2 + - perf: enable strict mode + - perf: remove argument 
reassignment + +1.13.1 / 2015-06-16 +=================== + + * deps: qs@2.4.2 + - Downgraded from 3.1.0 because of user-visible incompatibilities + +1.13.0 / 2015-06-14 +=================== + + * Add `statusCode` property on `Error`s, in addition to `status` + * Change `type` default to `application/json` for JSON parser + * Change `type` default to `application/x-www-form-urlencoded` for urlencoded parser + * Provide static `require` analysis + * Use the `http-errors` module to generate errors + * deps: bytes@2.1.0 + - Slight optimizations + * deps: iconv-lite@0.4.10 + - The encoding UTF-16 without BOM now defaults to UTF-16LE when detection fails + - Leading BOM is now removed when decoding + * deps: on-finished@~2.3.0 + - Add defined behavior for HTTP `CONNECT` requests + - Add defined behavior for HTTP `Upgrade` requests + - deps: ee-first@1.1.1 + * deps: qs@3.1.0 + - Fix dropping parameters like `hasOwnProperty` + - Fix various parsing edge cases + - Parsed object now has `null` prototype + * deps: raw-body@~2.1.1 + - Use `unpipe` module for unpiping requests + - deps: iconv-lite@0.4.10 + * deps: type-is@~1.6.3 + - deps: mime-types@~2.1.1 + - perf: reduce try block size + - perf: remove bitwise operations + * perf: enable strict mode + * perf: remove argument reassignment + * perf: remove delete call + +1.12.4 / 2015-05-10 +=================== + + * deps: debug@~2.2.0 + * deps: qs@2.4.2 + - Fix allowing parameters like `constructor` + * deps: on-finished@~2.2.1 + * deps: raw-body@~2.0.1 + - Fix a false-positive when unpiping in Node.js 0.8 + - deps: bytes@2.0.1 + * deps: type-is@~1.6.2 + - deps: mime-types@~2.0.11 + +1.12.3 / 2015-04-15 +=================== + + * Slight efficiency improvement when not debugging + * deps: depd@~1.0.1 + * deps: iconv-lite@0.4.8 + - Add encoding alias UNICODE-1-1-UTF-7 + * deps: raw-body@1.3.4 + - Fix hanging callback if request aborts during read + - deps: iconv-lite@0.4.8 + +1.12.2 / 2015-03-16 +=================== + + * deps: qs@2.4.1 + - Fix error when parameter `hasOwnProperty` is present + +1.12.1 / 2015-03-15 +=================== + + * deps: debug@~2.1.3 + - Fix high intensity foreground color for bold + - deps: ms@0.7.0 + * deps: type-is@~1.6.1 + - deps: mime-types@~2.0.10 + +1.12.0 / 2015-02-13 +=================== + + * add `debug` messages + * accept a function for the `type` option + * use `content-type` to parse `Content-Type` headers + * deps: iconv-lite@0.4.7 + - Gracefully support enumerables on `Object.prototype` + * deps: raw-body@1.3.3 + - deps: iconv-lite@0.4.7 + * deps: type-is@~1.6.0 + - fix argument reassignment + - fix false-positives in `hasBody` `Transfer-Encoding` check + - support wildcard for both type and subtype (`*/*`) + - deps: mime-types@~2.0.9 + +1.11.0 / 2015-01-30 +=================== + + * make internal `extended: true` depth limit infinity + * deps: type-is@~1.5.6 + - deps: mime-types@~2.0.8 + +1.10.2 / 2015-01-20 +=================== + + * deps: iconv-lite@0.4.6 + - Fix rare aliases of single-byte encodings + * deps: raw-body@1.3.2 + - deps: iconv-lite@0.4.6 + +1.10.1 / 2015-01-01 +=================== + + * deps: on-finished@~2.2.0 + * deps: type-is@~1.5.5 + - deps: mime-types@~2.0.7 + +1.10.0 / 2014-12-02 +=================== + + * make internal `extended: true` array limit dynamic + +1.9.3 / 2014-11-21 +================== + + * deps: iconv-lite@0.4.5 + - Fix Windows-31J and X-SJIS encoding support + * deps: qs@2.3.3 + - Fix `arrayLimit` behavior + * deps: raw-body@1.3.1 + - deps: iconv-lite@0.4.5 + * deps: 
type-is@~1.5.3 + - deps: mime-types@~2.0.3 + +1.9.2 / 2014-10-27 +================== + + * deps: qs@2.3.2 + - Fix parsing of mixed objects and values + +1.9.1 / 2014-10-22 +================== + + * deps: on-finished@~2.1.1 + - Fix handling of pipelined requests + * deps: qs@2.3.0 + - Fix parsing of mixed implicit and explicit arrays + * deps: type-is@~1.5.2 + - deps: mime-types@~2.0.2 + +1.9.0 / 2014-09-24 +================== + + * include the charset in "unsupported charset" error message + * include the encoding in "unsupported content encoding" error message + * deps: depd@~1.0.0 + +1.8.4 / 2014-09-23 +================== + + * fix content encoding to be case-insensitive + +1.8.3 / 2014-09-19 +================== + + * deps: qs@2.2.4 + - Fix issue with object keys starting with numbers truncated + +1.8.2 / 2014-09-15 +================== + + * deps: depd@0.4.5 + +1.8.1 / 2014-09-07 +================== + + * deps: media-typer@0.3.0 + * deps: type-is@~1.5.1 + +1.8.0 / 2014-09-05 +================== + + * make empty-body-handling consistent between chunked requests + - empty `json` produces `{}` + - empty `raw` produces `new Buffer(0)` + - empty `text` produces `''` + - empty `urlencoded` produces `{}` + * deps: qs@2.2.3 + - Fix issue where first empty value in array is discarded + * deps: type-is@~1.5.0 + - fix `hasbody` to be true for `content-length: 0` + +1.7.0 / 2014-09-01 +================== + + * add `parameterLimit` option to `urlencoded` parser + * change `urlencoded` extended array limit to 100 + * respond with 413 when over `parameterLimit` in `urlencoded` + +1.6.7 / 2014-08-29 +================== + + * deps: qs@2.2.2 + - Remove unnecessary cloning + +1.6.6 / 2014-08-27 +================== + + * deps: qs@2.2.0 + - Array parsing fix + - Performance improvements + +1.6.5 / 2014-08-16 +================== + + * deps: on-finished@2.1.0 + +1.6.4 / 2014-08-14 +================== + + * deps: qs@1.2.2 + +1.6.3 / 2014-08-10 +================== + + * deps: qs@1.2.1 + +1.6.2 / 2014-08-07 +================== + + * deps: qs@1.2.0 + - Fix parsing array of objects + +1.6.1 / 2014-08-06 +================== + + * deps: qs@1.1.0 + - Accept urlencoded square brackets + - Accept empty values in implicit array notation + +1.6.0 / 2014-08-05 +================== + + * deps: qs@1.0.2 + - Complete rewrite + - Limits array length to 20 + - Limits object depth to 5 + - Limits parameters to 1,000 + +1.5.2 / 2014-07-27 +================== + + * deps: depd@0.4.4 + - Work-around v8 generating empty stack traces + +1.5.1 / 2014-07-26 +================== + + * deps: depd@0.4.3 + - Fix exception when global `Error.stackTraceLimit` is too low + +1.5.0 / 2014-07-20 +================== + + * deps: depd@0.4.2 + - Add `TRACE_DEPRECATION` environment variable + - Remove non-standard grey color from color output + - Support `--no-deprecation` argument + - Support `--trace-deprecation` argument + * deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + * deps: raw-body@1.3.0 + - deps: iconv-lite@0.4.4 + - Added encoding UTF-7 + - Fix `Cannot switch to old mode now` error on Node.js 0.10+ + * deps: type-is@~1.3.2 + +1.4.3 / 2014-06-19 +================== + + * deps: type-is@1.3.1 + - fix global variable leak + +1.4.2 / 2014-06-19 +================== + + * deps: type-is@1.3.0 + - improve type parsing + +1.4.1 / 2014-06-19 +================== + + * fix urlencoded extended deprecation message + +1.4.0 / 2014-06-19 +================== + + * add `text` parser + * add `raw` parser + * check accepted charset in content-type (accepts 
utf-8) + * check accepted encoding in content-encoding (accepts identity) + * deprecate `bodyParser()` middleware; use `.json()` and `.urlencoded()` as needed + * deprecate `urlencoded()` without provided `extended` option + * lazy-load urlencoded parsers + * parsers split into files for reduced mem usage + * support gzip and deflate bodies + - set `inflate: false` to turn off + * deps: raw-body@1.2.2 + - Support all encodings from `iconv-lite` + +1.3.1 / 2014-06-11 +================== + + * deps: type-is@1.2.1 + - Switch dependency from mime to mime-types@1.0.0 + +1.3.0 / 2014-05-31 +================== + + * add `extended` option to urlencoded parser + +1.2.2 / 2014-05-27 +================== + + * deps: raw-body@1.1.6 + - assert stream encoding on node.js 0.8 + - assert stream encoding on node.js < 0.10.6 + - deps: bytes@1 + +1.2.1 / 2014-05-26 +================== + + * invoke `next(err)` after request fully read + - prevents hung responses and socket hang ups + +1.2.0 / 2014-05-11 +================== + + * add `verify` option + * deps: type-is@1.2.0 + - support suffix matching + +1.1.2 / 2014-05-11 +================== + + * improve json parser speed + +1.1.1 / 2014-05-11 +================== + + * fix repeated limit parsing with every request + +1.1.0 / 2014-05-10 +================== + + * add `type` option + * deps: pin for safety and consistency + +1.0.2 / 2014-04-14 +================== + + * use `type-is` module + +1.0.1 / 2014-03-20 +================== + + * lower default limits to 100kb diff --git a/scorpio-broker/http_server/node_modules/body-parser/LICENSE b/scorpio-broker/http_server/node_modules/body-parser/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..386b7b6946e47bc46f8138791049b4e6a7cef889 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/scorpio-broker/http_server/node_modules/body-parser/README.md b/scorpio-broker/http_server/node_modules/body-parser/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f6661b7d33c16808aaa4a19624666bc52f5cb639 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/README.md @@ -0,0 +1,476 @@ +# body-parser + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] +[![OpenSSF Scorecard Badge][ossf-scorecard-badge]][ossf-scorecard-visualizer] + +Node.js body parsing middleware. + +Parse incoming request bodies in a middleware before your handlers, available +under the `req.body` property. + +**Note** As `req.body`'s shape is based on user-controlled input, all +properties and values in this object are untrusted and should be validated +before trusting. For example, `req.body.foo.toString()` may fail in multiple +ways, for example the `foo` property may not be there or may not be a string, +and `toString` may not be a function and instead a string or other user input. + +[Learn about the anatomy of an HTTP transaction in Node.js](https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/). + +_This does not handle multipart bodies_, due to their complex and typically +large nature. For multipart bodies, you may be interested in the following +modules: + + * [busboy](https://www.npmjs.org/package/busboy#readme) and + [connect-busboy](https://www.npmjs.org/package/connect-busboy#readme) + * [multiparty](https://www.npmjs.org/package/multiparty#readme) and + [connect-multiparty](https://www.npmjs.org/package/connect-multiparty#readme) + * [formidable](https://www.npmjs.org/package/formidable#readme) + * [multer](https://www.npmjs.org/package/multer#readme) + +This module provides the following parsers: + + * [JSON body parser](#bodyparserjsonoptions) + * [Raw body parser](#bodyparserrawoptions) + * [Text body parser](#bodyparsertextoptions) + * [URL-encoded form body parser](#bodyparserurlencodedoptions) + +Other body parsers you might be interested in: + +- [body](https://www.npmjs.org/package/body#readme) +- [co-body](https://www.npmjs.org/package/co-body#readme) + +## Installation + +```sh +$ npm install body-parser +``` + +## API + +```js +var bodyParser = require('body-parser') +``` + +The `bodyParser` object exposes various factories to create middlewares. All +middlewares will populate the `req.body` property with the parsed body when +the `Content-Type` request header matches the `type` option, or an empty +object (`{}`) if there was no body to parse, the `Content-Type` was not matched, +or an error occurred. + +The various errors returned by this module are described in the +[errors section](#errors). + +### bodyParser.json([options]) + +Returns middleware that only parses `json` and only looks at requests where +the `Content-Type` header matches the `type` option. This parser accepts any +Unicode encoding of the body and supports automatic inflation of `gzip` and +`deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). + +#### Options + +The `json` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. 
+ +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### reviver + +The `reviver` option is passed directly to `JSON.parse` as the second +argument. You can find more information on this argument +[in the MDN documentation about JSON.parse](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#Example.3A_Using_the_reviver_parameter). + +##### strict + +When set to `true`, will only accept arrays and objects; when `false` will +accept anything `JSON.parse` accepts. Defaults to `true`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not a +function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `json`), a mime type (like `application/json`), or +a mime type with a wildcard (like `*/*` or `*/json`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a truthy +value. Defaults to `application/json`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.raw([options]) + +Returns middleware that parses all bodies as a `Buffer` and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a `Buffer` object +of the body. + +#### Options + +The `raw` function takes an optional `options` object that may contain any of +the following keys: + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. +If not a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this +can be an extension name (like `bin`), a mime type (like +`application/octet-stream`), or a mime type with a wildcard (like `*/*` or +`application/*`). If a function, the `type` option is called as `fn(req)` +and the request is parsed if it returns a truthy value. Defaults to +`application/octet-stream`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. 
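+
+For instance, a minimal sketch of the raw parser combined with a `verify`
+callback might look like the following (the `/webhook` route and the
+leading-byte check are purely illustrative assumptions, not part of this
+module):
+
+```js
+var express = require('express')
+var bodyParser = require('body-parser')
+
+var app = express()
+
+// parse raw octet-stream bodies up to 1mb; throwing inside `verify`
+// aborts parsing and yields a 403 "entity verify failed" error
+app.post('/webhook', bodyParser.raw({
+  type: 'application/octet-stream',
+  limit: '1mb',
+  verify: function (req, res, buf, encoding) {
+    // illustrative check only: reject bodies not starting with '{' (0x7b)
+    if (buf.length > 0 && buf[0] !== 0x7b) {
+      throw new Error('unexpected payload')
+    }
+  }
+}), function (req, res) {
+  // req.body is a Buffer containing the raw request body
+  res.sendStatus(204)
+})
+```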
+ +### bodyParser.text([options]) + +Returns middleware that parses all bodies as a string and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser supports automatic inflation of `gzip` and `deflate` encodings. + +A new `body` string containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This will be a string of the +body. + +#### Options + +The `text` function takes an optional `options` object that may contain any of +the following keys: + +##### defaultCharset + +Specify the default character set for the text content if the charset is not +specified in the `Content-Type` header of the request. Defaults to `utf-8`. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `txt`), a mime type (like `text/plain`), or a mime +type with a wildcard (like `*/*` or `text/*`). If a function, the `type` +option is called as `fn(req)` and the request is parsed if it returns a +truthy value. Defaults to `text/plain`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +### bodyParser.urlencoded([options]) + +Returns middleware that only parses `urlencoded` bodies and only looks at +requests where the `Content-Type` header matches the `type` option. This +parser accepts only UTF-8 encoding of the body and supports automatic +inflation of `gzip` and `deflate` encodings. + +A new `body` object containing the parsed data is populated on the `request` +object after the middleware (i.e. `req.body`). This object will contain +key-value pairs, where the value can be a string or array (when `extended` is +`false`), or any type (when `extended` is `true`). + +#### Options + +The `urlencoded` function takes an optional `options` object that may contain +any of the following keys: + +##### extended + +The `extended` option allows to choose between parsing the URL-encoded data +with the `querystring` library (when `false`) or the `qs` library (when +`true`). The "extended" syntax allows for rich objects and arrays to be +encoded into the URL-encoded format, allowing for a JSON-like experience +with URL-encoded. For more information, please +[see the qs library](https://www.npmjs.org/package/qs#readme). + +Defaults to `true`, but using the default has been deprecated. Please +research into the difference between `qs` and `querystring` and choose the +appropriate setting. + +##### inflate + +When set to `true`, then deflated (compressed) bodies will be inflated; when +`false`, deflated bodies are rejected. Defaults to `true`. + +##### limit + +Controls the maximum request body size. 
If this is a number, then the value +specifies the number of bytes; if it is a string, the value is passed to the +[bytes](https://www.npmjs.com/package/bytes) library for parsing. Defaults +to `'100kb'`. + +##### parameterLimit + +The `parameterLimit` option controls the maximum number of parameters that +are allowed in the URL-encoded data. If a request contains more parameters +than this value, a 413 will be returned to the client. Defaults to `1000`. + +##### type + +The `type` option is used to determine what media type the middleware will +parse. This option can be a string, array of strings, or a function. If not +a function, `type` option is passed directly to the +[type-is](https://www.npmjs.org/package/type-is#readme) library and this can +be an extension name (like `urlencoded`), a mime type (like +`application/x-www-form-urlencoded`), or a mime type with a wildcard (like +`*/x-www-form-urlencoded`). If a function, the `type` option is called as +`fn(req)` and the request is parsed if it returns a truthy value. Defaults +to `application/x-www-form-urlencoded`. + +##### verify + +The `verify` option, if supplied, is called as `verify(req, res, buf, encoding)`, +where `buf` is a `Buffer` of the raw request body and `encoding` is the +encoding of the request. The parsing can be aborted by throwing an error. + +#### depth + +The `depth` option is used to configure the maximum depth of the `qs` library when `extended` is `true`. This allows you to limit the amount of keys that are parsed and can be useful to prevent certain types of abuse. Defaults to `32`. It is recommended to keep this value as low as possible. + +## Errors + +The middlewares provided by this module create errors using the +[`http-errors` module](https://www.npmjs.com/package/http-errors). The errors +will typically have a `status`/`statusCode` property that contains the suggested +HTTP response code, an `expose` property to determine if the `message` property +should be displayed to the client, a `type` property to determine the type of +error without matching against the `message`, and a `body` property containing +the read body, if available. + +The following are the common errors created, though any error can come through +for various reasons. + +### content encoding unsupported + +This error will occur when the request had a `Content-Encoding` header that +contained an encoding but the "inflation" option was set to `false`. The +`status` property is set to `415`, the `type` property is set to +`'encoding.unsupported'`, and the `charset` property will be set to the +encoding that is unsupported. + +### entity parse failed + +This error will occur when the request contained an entity that could not be +parsed by the middleware. The `status` property is set to `400`, the `type` +property is set to `'entity.parse.failed'`, and the `body` property is set to +the entity value that failed parsing. + +### entity verify failed + +This error will occur when the request contained an entity that could not be +failed verification by the defined `verify` option. The `status` property is +set to `403`, the `type` property is set to `'entity.verify.failed'`, and the +`body` property is set to the entity value that failed verification. + +### request aborted + +This error will occur when the request is aborted by the client before reading +the body has finished. 
The `received` property will be set to the number of +bytes received before the request was aborted and the `expected` property is +set to the number of expected bytes. The `status` property is set to `400` +and `type` property is set to `'request.aborted'`. + +### request entity too large + +This error will occur when the request body's size is larger than the "limit" +option. The `limit` property will be set to the byte limit and the `length` +property will be set to the request body's length. The `status` property is +set to `413` and the `type` property is set to `'entity.too.large'`. + +### request size did not match content length + +This error will occur when the request's length did not match the length from +the `Content-Length` header. This typically occurs when the request is malformed, +typically when the `Content-Length` header was calculated based on characters +instead of bytes. The `status` property is set to `400` and the `type` property +is set to `'request.size.invalid'`. + +### stream encoding should not be set + +This error will occur when something called the `req.setEncoding` method prior +to this middleware. This module operates directly on bytes only and you cannot +call `req.setEncoding` when using this module. The `status` property is set to +`500` and the `type` property is set to `'stream.encoding.set'`. + +### stream is not readable + +This error will occur when the request is no longer readable when this middleware +attempts to read it. This typically means something other than a middleware from +this module read the request body already and the middleware was also configured to +read the same request. The `status` property is set to `500` and the `type` +property is set to `'stream.not.readable'`. + +### too many parameters + +This error will occur when the content of the request exceeds the configured +`parameterLimit` for the `urlencoded` parser. The `status` property is set to +`413` and the `type` property is set to `'parameters.too.many'`. + +### unsupported charset "BOGUS" + +This error will occur when the request had a charset parameter in the +`Content-Type` header, but the `iconv-lite` module does not support it OR the +parser does not support it. The charset is contained in the message as well +as in the `charset` property. The `status` property is set to `415`, the +`type` property is set to `'charset.unsupported'`, and the `charset` property +is set to the charset that is unsupported. + +### unsupported content encoding "bogus" + +This error will occur when the request had a `Content-Encoding` header that +contained an unsupported encoding. The encoding is contained in the message +as well as in the `encoding` property. The `status` property is set to `415`, +the `type` property is set to `'encoding.unsupported'`, and the `encoding` +property is set to the encoding that is unsupported. + +### The input exceeded the depth + +This error occurs when using `bodyParser.urlencoded` with the `extended` property set to `true` and the input exceeds the configured `depth` option. The `status` property is set to `400`. It is recommended to review the `depth` option and evaluate if it requires a higher value. When the `depth` option is set to `32` (default value), the error will not be thrown. + +## Examples + +### Express/Connect top-level generic + +This example demonstrates adding a generic JSON and URL-encoded parser as a +top-level middleware, which will parse the bodies of all incoming requests. +This is the simplest setup. 
+ +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse application/x-www-form-urlencoded +app.use(bodyParser.urlencoded({ extended: false })) + +// parse application/json +app.use(bodyParser.json()) + +app.use(function (req, res) { + res.setHeader('Content-Type', 'text/plain') + res.write('you posted:\n') + res.end(JSON.stringify(req.body, null, 2)) +}) +``` + +### Express route-specific + +This example demonstrates adding body parsers specifically to the routes that +need them. In general, this is the most recommended way to use body-parser with +Express. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// create application/json parser +var jsonParser = bodyParser.json() + +// create application/x-www-form-urlencoded parser +var urlencodedParser = bodyParser.urlencoded({ extended: false }) + +// POST /login gets urlencoded bodies +app.post('/login', urlencodedParser, function (req, res) { + res.send('welcome, ' + req.body.username) +}) + +// POST /api/users gets JSON bodies +app.post('/api/users', jsonParser, function (req, res) { + // create user in req.body +}) +``` + +### Change accepted type for parsers + +All the parsers accept a `type` option which allows you to change the +`Content-Type` that the middleware will parse. + +```js +var express = require('express') +var bodyParser = require('body-parser') + +var app = express() + +// parse various different custom JSON types as JSON +app.use(bodyParser.json({ type: 'application/*+json' })) + +// parse some custom thing into a Buffer +app.use(bodyParser.raw({ type: 'application/vnd.custom-type' })) + +// parse an HTML body into a string +app.use(bodyParser.text({ type: 'text/html' })) +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/expressjs/body-parser/master?label=ci +[ci-url]: https://github.com/expressjs/body-parser/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/body-parser/master +[coveralls-url]: https://coveralls.io/r/expressjs/body-parser?branch=master +[node-version-image]: https://badgen.net/npm/node/body-parser +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/body-parser +[npm-url]: https://npmjs.org/package/body-parser +[npm-version-image]: https://badgen.net/npm/v/body-parser +[ossf-scorecard-badge]: https://api.scorecard.dev/projects/github.com/expressjs/body-parser/badge +[ossf-scorecard-visualizer]: https://ossf.github.io/scorecard-visualizer/#/projects/github.com/expressjs/body-parser \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/body-parser/SECURITY.md b/scorpio-broker/http_server/node_modules/body-parser/SECURITY.md new file mode 100644 index 0000000000000000000000000000000000000000..9694d429616df706508285a1ef185d40e45cdfae --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The Express team and community take all security bugs seriously. Thank you +for improving the security of Express. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `body-parser`. This +information can be found in the npm registry using the command +`npm owner ls body-parser`. 
+If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/expressjs/body-parser/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/scorpio-broker/http_server/node_modules/body-parser/index.js b/scorpio-broker/http_server/node_modules/body-parser/index.js new file mode 100644 index 0000000000000000000000000000000000000000..bb24d739d9c5fade336cdf76b08b784ae2594d77 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/index.js @@ -0,0 +1,156 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var deprecate = require('depd')('body-parser') + +/** + * Cache of loaded parsers. + * @private + */ + +var parsers = Object.create(null) + +/** + * @typedef Parsers + * @type {function} + * @property {function} json + * @property {function} raw + * @property {function} text + * @property {function} urlencoded + */ + +/** + * Module exports. + * @type {Parsers} + */ + +exports = module.exports = deprecate.function(bodyParser, + 'bodyParser: use individual json/urlencoded middlewares') + +/** + * JSON parser. + * @public + */ + +Object.defineProperty(exports, 'json', { + configurable: true, + enumerable: true, + get: createParserGetter('json') +}) + +/** + * Raw parser. + * @public + */ + +Object.defineProperty(exports, 'raw', { + configurable: true, + enumerable: true, + get: createParserGetter('raw') +}) + +/** + * Text parser. + * @public + */ + +Object.defineProperty(exports, 'text', { + configurable: true, + enumerable: true, + get: createParserGetter('text') +}) + +/** + * URL-encoded parser. + * @public + */ + +Object.defineProperty(exports, 'urlencoded', { + configurable: true, + enumerable: true, + get: createParserGetter('urlencoded') +}) + +/** + * Create a middleware to parse json and urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @deprecated + * @public + */ + +function bodyParser (options) { + // use default type for parsers + var opts = Object.create(options || null, { + type: { + configurable: true, + enumerable: true, + value: undefined, + writable: true + } + }) + + var _urlencoded = exports.urlencoded(opts) + var _json = exports.json(opts) + + return function bodyParser (req, res, next) { + _json(req, res, function (err) { + if (err) return next(err) + _urlencoded(req, res, next) + }) + } +} + +/** + * Create a getter for loading a parser. + * @private + */ + +function createParserGetter (name) { + return function get () { + return loadParser(name) + } +} + +/** + * Load a parser module. 
+ * @private + */ + +function loadParser (parserName) { + var parser = parsers[parserName] + + if (parser !== undefined) { + return parser + } + + // this uses a switch for static require analysis + switch (parserName) { + case 'json': + parser = require('./lib/types/json') + break + case 'raw': + parser = require('./lib/types/raw') + break + case 'text': + parser = require('./lib/types/text') + break + case 'urlencoded': + parser = require('./lib/types/urlencoded') + break + } + + // store to prevent invoking require() + return (parsers[parserName] = parser) +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/lib/read.js b/scorpio-broker/http_server/node_modules/body-parser/lib/read.js new file mode 100644 index 0000000000000000000000000000000000000000..fce6283f50961e68c2f576031ed5e3d4fdc39984 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/lib/read.js @@ -0,0 +1,205 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var createError = require('http-errors') +var destroy = require('destroy') +var getBody = require('raw-body') +var iconv = require('iconv-lite') +var onFinished = require('on-finished') +var unpipe = require('unpipe') +var zlib = require('zlib') + +/** + * Module exports. + */ + +module.exports = read + +/** + * Read a request into a buffer and parse. + * + * @param {object} req + * @param {object} res + * @param {function} next + * @param {function} parse + * @param {function} debug + * @param {object} options + * @private + */ + +function read (req, res, next, parse, debug, options) { + var length + var opts = options + var stream + + // flag as parsed + req._body = true + + // read options + var encoding = opts.encoding !== null + ? opts.encoding + : null + var verify = opts.verify + + try { + // get the content stream + stream = contentstream(req, debug, opts.inflate) + length = stream.length + stream.length = undefined + } catch (err) { + return next(err) + } + + // set raw-body options + opts.length = length + opts.encoding = verify + ? null + : encoding + + // assert charset is supported + if (opts.encoding === null && encoding !== null && !iconv.encodingExists(encoding)) { + return next(createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + })) + } + + // read body + debug('read body') + getBody(stream, opts, function (error, body) { + if (error) { + var _error + + if (error.type === 'encoding.unsupported') { + // echo back charset + _error = createError(415, 'unsupported charset "' + encoding.toUpperCase() + '"', { + charset: encoding.toLowerCase(), + type: 'charset.unsupported' + }) + } else { + // set status code on error + _error = createError(400, error) + } + + // unpipe from stream and destroy + if (stream !== req) { + unpipe(req) + destroy(stream, true) + } + + // read off entire request + dump(req, function onfinished () { + next(createError(400, _error)) + }) + return + } + + // verify + if (verify) { + try { + debug('verify body') + verify(req, res, body, encoding) + } catch (err) { + next(createError(403, err, { + body: body, + type: err.type || 'entity.verify.failed' + })) + return + } + } + + // parse + var str = body + try { + debug('parse body') + str = typeof body !== 'string' && encoding !== null + ? 
iconv.decode(body, encoding) + : body + req.body = parse(str) + } catch (err) { + next(createError(400, err, { + body: str, + type: err.type || 'entity.parse.failed' + })) + return + } + + next() + }) +} + +/** + * Get the content stream of the request. + * + * @param {object} req + * @param {function} debug + * @param {boolean} [inflate=true] + * @return {object} + * @api private + */ + +function contentstream (req, debug, inflate) { + var encoding = (req.headers['content-encoding'] || 'identity').toLowerCase() + var length = req.headers['content-length'] + var stream + + debug('content-encoding "%s"', encoding) + + if (inflate === false && encoding !== 'identity') { + throw createError(415, 'content encoding unsupported', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + switch (encoding) { + case 'deflate': + stream = zlib.createInflate() + debug('inflate body') + req.pipe(stream) + break + case 'gzip': + stream = zlib.createGunzip() + debug('gunzip body') + req.pipe(stream) + break + case 'identity': + stream = req + stream.length = length + break + default: + throw createError(415, 'unsupported content encoding "' + encoding + '"', { + encoding: encoding, + type: 'encoding.unsupported' + }) + } + + return stream +} + +/** + * Dump the contents of a request. + * + * @param {object} req + * @param {function} callback + * @api private + */ + +function dump (req, callback) { + if (onFinished.isFinished(req)) { + callback(null) + } else { + onFinished(req, callback) + req.resume() + } +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/lib/types/json.js b/scorpio-broker/http_server/node_modules/body-parser/lib/types/json.js new file mode 100644 index 0000000000000000000000000000000000000000..59f3f7e28f2416cac957b5e35dc78839a5019e0d --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/lib/types/json.js @@ -0,0 +1,247 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:json') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = json + +/** + * RegExp to match the first non-space in a string. + * + * Allowed whitespace is defined in RFC 7159: + * + * ws = *( + * %x20 / ; Space + * %x09 / ; Horizontal tab + * %x0A / ; Line feed or New line + * %x0D ) ; Carriage return + */ + +var FIRST_CHAR_REGEXP = /^[\x20\x09\x0a\x0d]*([^\x20\x09\x0a\x0d])/ // eslint-disable-line no-control-regex + +var JSON_SYNTAX_CHAR = '#' +var JSON_SYNTAX_REGEXP = /#+/g + +/** + * Create a middleware to parse JSON bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function json (options) { + var opts = options || {} + + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var inflate = opts.inflate !== false + var reviver = opts.reviver + var strict = opts.strict !== false + var type = opts.type || 'application/json' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? 
typeChecker(type) + : type + + function parse (body) { + if (body.length === 0) { + // special-case empty json body, as it's a common client-side mistake + // TODO: maybe make this configurable or part of "strict" option + return {} + } + + if (strict) { + var first = firstchar(body) + + if (first !== '{' && first !== '[') { + debug('strict violation') + throw createStrictSyntaxError(body, first) + } + } + + try { + debug('parse json') + return JSON.parse(body, reviver) + } catch (e) { + throw normalizeJsonSyntaxError(e, { + message: e.message, + stack: e.stack + }) + } + } + + return function jsonParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset per RFC 7159 sec 8.1 + var charset = getCharset(req) || 'utf-8' + if (charset.slice(0, 4) !== 'utf-') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Create strict violation syntax error matching native error. + * + * @param {string} str + * @param {string} char + * @return {Error} + * @private + */ + +function createStrictSyntaxError (str, char) { + var index = str.indexOf(char) + var partial = '' + + if (index !== -1) { + partial = str.substring(0, index) + JSON_SYNTAX_CHAR + + for (var i = index + 1; i < str.length; i++) { + partial += JSON_SYNTAX_CHAR + } + } + + try { + JSON.parse(partial); /* istanbul ignore next */ throw new SyntaxError('strict violation') + } catch (e) { + return normalizeJsonSyntaxError(e, { + message: e.message.replace(JSON_SYNTAX_REGEXP, function (placeholder) { + return str.substring(index, index + placeholder.length) + }), + stack: e.stack + }) + } +} + +/** + * Get the first non-whitespace character in a string. + * + * @param {string} str + * @return {function} + * @private + */ + +function firstchar (str) { + var match = FIRST_CHAR_REGEXP.exec(str) + + return match + ? match[1] + : undefined +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Normalize a SyntaxError for JSON.parse. + * + * @param {SyntaxError} error + * @param {object} obj + * @return {SyntaxError} + */ + +function normalizeJsonSyntaxError (error, obj) { + var keys = Object.getOwnPropertyNames(error) + + for (var i = 0; i < keys.length; i++) { + var key = keys[i] + if (key !== 'stack' && key !== 'message') { + delete error[key] + } + } + + // replace stack before message for Node.js 0.10 and below + error.stack = obj.stack.replace(error.message, obj.message) + error.message = obj.message + + return error +} + +/** + * Get the simple type checker. 
+ * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/lib/types/raw.js b/scorpio-broker/http_server/node_modules/body-parser/lib/types/raw.js new file mode 100644 index 0000000000000000000000000000000000000000..f5d1b67475405284e3dac312f92ade101571329f --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/lib/types/raw.js @@ -0,0 +1,101 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var debug = require('debug')('body-parser:raw') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = raw + +/** + * Create a middleware to parse raw bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function raw (options) { + var opts = options || {} + + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/octet-stream' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function rawParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // read + read(req, res, next, parse, debug, { + encoding: null, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/lib/types/text.js b/scorpio-broker/http_server/node_modules/body-parser/lib/types/text.js new file mode 100644 index 0000000000000000000000000000000000000000..083a00908a6299a8ef72f477983359f5675f82ef --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/lib/types/text.js @@ -0,0 +1,121 @@ +/*! + * body-parser + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var debug = require('debug')('body-parser:text') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = text + +/** + * Create a middleware to parse text bodies. + * + * @param {object} [options] + * @return {function} + * @api public + */ + +function text (options) { + var opts = options || {} + + var defaultCharset = opts.defaultCharset || 'utf-8' + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? 
bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'text/plain' + var verify = opts.verify || false + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (buf) { + return buf + } + + return function textParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // get charset + var charset = getCharset(req) || defaultCharset + + // read + read(req, res, next, parse, debug, { + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify + }) + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/lib/types/urlencoded.js b/scorpio-broker/http_server/node_modules/body-parser/lib/types/urlencoded.js new file mode 100644 index 0000000000000000000000000000000000000000..2bd4485f54e3d8aaa08eec1f5fdbd525a8f5ba82 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/lib/types/urlencoded.js @@ -0,0 +1,307 @@ +/*! + * body-parser + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var bytes = require('bytes') +var contentType = require('content-type') +var createError = require('http-errors') +var debug = require('debug')('body-parser:urlencoded') +var deprecate = require('depd')('body-parser') +var read = require('../read') +var typeis = require('type-is') + +/** + * Module exports. + */ + +module.exports = urlencoded + +/** + * Cache of parser modules. + */ + +var parsers = Object.create(null) + +/** + * Create a middleware to parse urlencoded bodies. + * + * @param {object} [options] + * @return {function} + * @public + */ + +function urlencoded (options) { + var opts = options || {} + + // notice because option default will flip in next major + if (opts.extended === undefined) { + deprecate('undefined extended: provide extended option') + } + + var extended = opts.extended !== false + var inflate = opts.inflate !== false + var limit = typeof opts.limit !== 'number' + ? bytes.parse(opts.limit || '100kb') + : opts.limit + var type = opts.type || 'application/x-www-form-urlencoded' + var verify = opts.verify || false + var depth = typeof opts.depth !== 'number' + ? Number(opts.depth || 32) + : opts.depth + + if (verify !== false && typeof verify !== 'function') { + throw new TypeError('option verify must be function') + } + + // create the appropriate query parser + var queryparse = extended + ? 
extendedparser(opts) + : simpleparser(opts) + + // create the appropriate type checking function + var shouldParse = typeof type !== 'function' + ? typeChecker(type) + : type + + function parse (body) { + return body.length + ? queryparse(body) + : {} + } + + return function urlencodedParser (req, res, next) { + if (req._body) { + debug('body already parsed') + next() + return + } + + req.body = req.body || {} + + // skip requests without bodies + if (!typeis.hasBody(req)) { + debug('skip empty body') + next() + return + } + + debug('content-type %j', req.headers['content-type']) + + // determine if request should be parsed + if (!shouldParse(req)) { + debug('skip parsing') + next() + return + } + + // assert charset + var charset = getCharset(req) || 'utf-8' + if (charset !== 'utf-8') { + debug('invalid charset') + next(createError(415, 'unsupported charset "' + charset.toUpperCase() + '"', { + charset: charset, + type: 'charset.unsupported' + })) + return + } + + // read + read(req, res, next, parse, debug, { + debug: debug, + encoding: charset, + inflate: inflate, + limit: limit, + verify: verify, + depth: depth + }) + } +} + +/** + * Get the extended query parser. + * + * @param {object} options + */ + +function extendedparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + + var depth = typeof options.depth !== 'number' + ? Number(options.depth || 32) + : options.depth + var parse = parser('qs') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isNaN(depth) || depth < 0) { + throw new TypeError('option depth must be a zero or a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + var arrayLimit = Math.max(100, paramCount) + + debug('parse extended urlencoding') + try { + return parse(body, { + allowPrototypes: true, + arrayLimit: arrayLimit, + depth: depth, + strictDepth: true, + parameterLimit: parameterLimit + }) + } catch (err) { + if (err instanceof RangeError) { + throw createError(400, 'The input exceeded the depth', { + type: 'querystring.parse.rangeError' + }) + } else { + throw err + } + } + } +} + +/** + * Get the charset of a request. + * + * @param {object} req + * @api private + */ + +function getCharset (req) { + try { + return (contentType.parse(req).parameters.charset || '').toLowerCase() + } catch (e) { + return undefined + } +} + +/** + * Count the number of parameters, stopping once limit reached + * + * @param {string} body + * @param {number} limit + * @api private + */ + +function parameterCount (body, limit) { + var count = 0 + var index = 0 + + while ((index = body.indexOf('&', index)) !== -1) { + count++ + index++ + + if (count === limit) { + return undefined + } + } + + return count +} + +/** + * Get parser for module name dynamically. 
+ * + * @param {string} name + * @return {function} + * @api private + */ + +function parser (name) { + var mod = parsers[name] + + if (mod !== undefined) { + return mod.parse + } + + // this uses a switch for static require analysis + switch (name) { + case 'qs': + mod = require('qs') + break + case 'querystring': + mod = require('querystring') + break + } + + // store to prevent invoking require() + parsers[name] = mod + + return mod.parse +} + +/** + * Get the simple query parser. + * + * @param {object} options + */ + +function simpleparser (options) { + var parameterLimit = options.parameterLimit !== undefined + ? options.parameterLimit + : 1000 + var parse = parser('querystring') + + if (isNaN(parameterLimit) || parameterLimit < 1) { + throw new TypeError('option parameterLimit must be a positive number') + } + + if (isFinite(parameterLimit)) { + parameterLimit = parameterLimit | 0 + } + + return function queryparse (body) { + var paramCount = parameterCount(body, parameterLimit) + + if (paramCount === undefined) { + debug('too many parameters') + throw createError(413, 'too many parameters', { + type: 'parameters.too.many' + }) + } + + debug('parse urlencoding') + return parse(body, undefined, undefined, { maxKeys: parameterLimit }) + } +} + +/** + * Get the simple type checker. + * + * @param {string} type + * @return {function} + */ + +function typeChecker (type) { + return function checkType (req) { + return Boolean(typeis(req, type)) + } +} diff --git a/scorpio-broker/http_server/node_modules/body-parser/package.json b/scorpio-broker/http_server/node_modules/body-parser/package.json new file mode 100644 index 0000000000000000000000000000000000000000..3c9926fc58c48384546542d899382ff7042c92c3 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/body-parser/package.json @@ -0,0 +1,56 @@ +{ + "name": "body-parser", + "description": "Node.js body parsing middleware", + "version": "1.20.3", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "expressjs/body-parser", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "devDependencies": { + "eslint": "8.34.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-markdown": "3.0.0", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "6.1.1", + "eslint-plugin-standard": "4.1.0", + "methods": "1.1.2", + "mocha": "10.2.0", + "nyc": "15.1.0", + "safe-buffer": "5.2.1", + "supertest": "6.3.3" + }, + "files": [ + "lib/", + "LICENSE", + "HISTORY.md", + "SECURITY.md", + "index.js" + ], + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --require test/support/env --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/scorpio-broker/http_server/node_modules/bytes/History.md b/scorpio-broker/http_server/node_modules/bytes/History.md new file mode 100644 index 0000000000000000000000000000000000000000..d60ce0e6df2efd3f83c08b098d1b7b683b96ec84 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/bytes/History.md @@ -0,0 +1,97 @@ +3.1.2 / 2022-01-27 
+================== + + * Fix return value for un-parsable strings + +3.1.1 / 2021-11-15 +================== + + * Fix "thousandsSeparator" incorrecting formatting fractional part + +3.1.0 / 2019-01-22 +================== + + * Add petabyte (`pb`) support + +3.0.0 / 2017-08-31 +================== + + * Change "kB" to "KB" in format output + * Remove support for Node.js 0.6 + * Remove support for ComponentJS + +2.5.0 / 2017-03-24 +================== + + * Add option "unit" + +2.4.0 / 2016-06-01 +================== + + * Add option "unitSeparator" + +2.3.0 / 2016-02-15 +================== + + * Drop partial bytes on all parsed units + * Fix non-finite numbers to `.format` to return `null` + * Fix parsing byte string that looks like hex + * perf: hoist regular expressions + +2.2.0 / 2015-11-13 +================== + + * add option "decimalPlaces" + * add option "fixedDecimals" + +2.1.0 / 2015-05-21 +================== + + * add `.format` export + * add `.parse` export + +2.0.2 / 2015-05-20 +================== + + * remove map recreation + * remove unnecessary object construction + +2.0.1 / 2015-05-07 +================== + + * fix browserify require + * remove node.extend dependency + +2.0.0 / 2015-04-12 +================== + + * add option "case" + * add option "thousandsSeparator" + * return "null" on invalid parse input + * support proper round-trip: bytes(bytes(num)) === num + * units no longer case sensitive when parsing + +1.0.0 / 2014-05-05 +================== + + * add negative support. fixes #6 + +0.3.0 / 2014-03-19 +================== + + * added terabyte support + +0.2.1 / 2013-04-01 +================== + + * add .component + +0.2.0 / 2012-10-28 +================== + + * bytes(200).should.eql('200b') + +0.1.0 / 2012-07-04 +================== + + * add bytes to string conversion [yields] diff --git a/scorpio-broker/http_server/node_modules/bytes/LICENSE b/scorpio-broker/http_server/node_modules/bytes/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..63e95a96338a608c218a7ef5805629878aaa951f --- /dev/null +++ b/scorpio-broker/http_server/node_modules/bytes/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2012-2014 TJ Holowaychuk +Copyright (c) 2015 Jed Watson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
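The history above documents the behaviours of the vendored `bytes` module (round-trip between `format` and `parse`, case-insensitive units, `null` on unparsable input, partial bytes dropped). A minimal sketch of those documented behaviours, assuming the module is required from this vendored copy:

```js
var bytes = require('bytes');

// format: number -> string (kept to at most two decimal places by default)
console.log(bytes.format(1536));          // '1.5KB'

// parse: string -> integer number of bytes; units are case-insensitive
console.log(bytes.parse('1.5kb'));        // 1536
console.log(bytes.parse('1kB'));          // 1024

// round-trip property noted in the 2.0.0 entry: bytes(bytes(num)) === num
console.log(bytes(bytes(1024)) === 1024); // true

// unparsable input returns null rather than throwing
console.log(bytes.parse('not-a-size'));   // null
```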
diff --git a/scorpio-broker/http_server/node_modules/bytes/Readme.md b/scorpio-broker/http_server/node_modules/bytes/Readme.md new file mode 100644 index 0000000000000000000000000000000000000000..5790e23e328e045e66ec6f0b98526157b6c2abcf --- /dev/null +++ b/scorpio-broker/http_server/node_modules/bytes/Readme.md @@ -0,0 +1,152 @@ +# Bytes utility + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Utility to parse a string bytes (ex: `1TB`) to bytes (`1099511627776`) and vice-versa. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```bash +$ npm install bytes +``` + +## Usage + +```js +var bytes = require('bytes'); +``` + +#### bytes(number|string value, [options]): number|string|null + +Default export function. Delegates to either `bytes.format` or `bytes.parse` based on the type of `value`. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number`|`string` | Number value to format or string value to parse | +| options | `Object` | Conversion options for `format` | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`number`|`null` | Return null upon error. Numeric value in bytes, or string value otherwise. | + +**Example** + +```js +bytes(1024); +// output: '1KB' + +bytes('1KB'); +// output: 1024 +``` + +#### bytes.format(number value, [options]): string|null + +Format the given value in bytes into a string. If the value is negative, it is kept as such. If it is a float, it is + rounded. + +**Arguments** + +| Name | Type | Description | +|---------|----------|--------------------| +| value | `number` | Value in bytes | +| options | `Object` | Conversion options | + +**Options** + +| Property | Type | Description | +|-------------------|--------|-----------------------------------------------------------------------------------------| +| decimalPlaces | `number`|`null` | Maximum number of decimal places to include in output. Default value to `2`. | +| fixedDecimals | `boolean`|`null` | Whether to always display the maximum number of decimal places. Default value to `false` | +| thousandsSeparator | `string`|`null` | Example of values: `' '`, `','` and `'.'`... Default value to `''`. | +| unit | `string`|`null` | The unit in which the result will be returned (B/KB/MB/GB/TB). Default value to `''` (which means auto detect). | +| unitSeparator | `string`|`null` | Separator to use between number and unit. Default value to `''`. | + +**Returns** + +| Name | Type | Description | +|---------|------------------|-------------------------------------------------| +| results | `string`|`null` | Return null upon error. String value otherwise. | + +**Example** + +```js +bytes.format(1024); +// output: '1KB' + +bytes.format(1000); +// output: '1000B' + +bytes.format(1000, {thousandsSeparator: ' '}); +// output: '1 000B' + +bytes.format(1024 * 1.7, {decimalPlaces: 0}); +// output: '2KB' + +bytes.format(1024, {unitSeparator: ' '}); +// output: '1 KB' +``` + +#### bytes.parse(string|number value): number|null + +Parse the string value into an integer in bytes. 
If no unit is given, or `value` +is a number, it is assumed the value is in bytes. + +Supported units and abbreviations are as follows and are case-insensitive: + + * `b` for bytes + * `kb` for kilobytes + * `mb` for megabytes + * `gb` for gigabytes + * `tb` for terabytes + * `pb` for petabytes + +The units are in powers of two, not ten. This means 1kb = 1024b according to this parser. + +**Arguments** + +| Name | Type | Description | +|---------------|--------|--------------------| +| value | `string`|`number` | String to parse, or number in bytes. | + +**Returns** + +| Name | Type | Description | +|---------|-------------|-------------------------| +| results | `number`|`null` | Return null upon error. Value in bytes otherwise. | + +**Example** + +```js +bytes.parse('1KB'); +// output: 1024 + +bytes.parse('1024'); +// output: 1024 + +bytes.parse(1024); +// output: 1024 +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/visionmedia/bytes.js/master?label=ci +[ci-url]: https://github.com/visionmedia/bytes.js/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/visionmedia/bytes.js/master +[coveralls-url]: https://coveralls.io/r/visionmedia/bytes.js?branch=master +[downloads-image]: https://badgen.net/npm/dm/bytes +[downloads-url]: https://npmjs.org/package/bytes +[npm-image]: https://badgen.net/npm/v/bytes +[npm-url]: https://npmjs.org/package/bytes diff --git a/scorpio-broker/http_server/node_modules/bytes/index.js b/scorpio-broker/http_server/node_modules/bytes/index.js new file mode 100644 index 0000000000000000000000000000000000000000..6f2d0f89e1258564bad95175159e1d8a6abd9ddf --- /dev/null +++ b/scorpio-broker/http_server/node_modules/bytes/index.js @@ -0,0 +1,170 @@ +/*! + * bytes + * Copyright(c) 2012-2014 TJ Holowaychuk + * Copyright(c) 2015 Jed Watson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +module.exports = bytes; +module.exports.format = format; +module.exports.parse = parse; + +/** + * Module variables. + * @private + */ + +var formatThousandsRegExp = /\B(?=(\d{3})+(?!\d))/g; + +var formatDecimalsRegExp = /(?:\.0*|(\.[^0]+)0+)$/; + +var map = { + b: 1, + kb: 1 << 10, + mb: 1 << 20, + gb: 1 << 30, + tb: Math.pow(1024, 4), + pb: Math.pow(1024, 5), +}; + +var parseRegExp = /^((-|\+)?(\d+(?:\.\d+)?)) *(kb|mb|gb|tb|pb)$/i; + +/** + * Convert the given value in bytes into a string or parse to string to an integer in bytes. + * + * @param {string|number} value + * @param {{ + * case: [string], + * decimalPlaces: [number] + * fixedDecimals: [boolean] + * thousandsSeparator: [string] + * unitSeparator: [string] + * }} [options] bytes options. + * + * @returns {string|number|null} + */ + +function bytes(value, options) { + if (typeof value === 'string') { + return parse(value); + } + + if (typeof value === 'number') { + return format(value, options); + } + + return null; +} + +/** + * Format the given value in bytes into a string. + * + * If the value is negative, it is kept as such. If it is a float, + * it is rounded. 
+ * + * @param {number} value + * @param {object} [options] + * @param {number} [options.decimalPlaces=2] + * @param {number} [options.fixedDecimals=false] + * @param {string} [options.thousandsSeparator=] + * @param {string} [options.unit=] + * @param {string} [options.unitSeparator=] + * + * @returns {string|null} + * @public + */ + +function format(value, options) { + if (!Number.isFinite(value)) { + return null; + } + + var mag = Math.abs(value); + var thousandsSeparator = (options && options.thousandsSeparator) || ''; + var unitSeparator = (options && options.unitSeparator) || ''; + var decimalPlaces = (options && options.decimalPlaces !== undefined) ? options.decimalPlaces : 2; + var fixedDecimals = Boolean(options && options.fixedDecimals); + var unit = (options && options.unit) || ''; + + if (!unit || !map[unit.toLowerCase()]) { + if (mag >= map.pb) { + unit = 'PB'; + } else if (mag >= map.tb) { + unit = 'TB'; + } else if (mag >= map.gb) { + unit = 'GB'; + } else if (mag >= map.mb) { + unit = 'MB'; + } else if (mag >= map.kb) { + unit = 'KB'; + } else { + unit = 'B'; + } + } + + var val = value / map[unit.toLowerCase()]; + var str = val.toFixed(decimalPlaces); + + if (!fixedDecimals) { + str = str.replace(formatDecimalsRegExp, '$1'); + } + + if (thousandsSeparator) { + str = str.split('.').map(function (s, i) { + return i === 0 + ? s.replace(formatThousandsRegExp, thousandsSeparator) + : s + }).join('.'); + } + + return str + unitSeparator + unit; +} + +/** + * Parse the string value into an integer in bytes. + * + * If no unit is given, it is assumed the value is in bytes. + * + * @param {number|string} val + * + * @returns {number|null} + * @public + */ + +function parse(val) { + if (typeof val === 'number' && !isNaN(val)) { + return val; + } + + if (typeof val !== 'string') { + return null; + } + + // Test if the string passed is valid + var results = parseRegExp.exec(val); + var floatValue; + var unit = 'b'; + + if (!results) { + // Nothing could be extracted from the given string + floatValue = parseInt(val, 10); + unit = 'b' + } else { + // Retrieve the value and the unit + floatValue = parseFloat(results[1]); + unit = results[4].toLowerCase(); + } + + if (isNaN(floatValue)) { + return null; + } + + return Math.floor(map[unit] * floatValue); +} diff --git a/scorpio-broker/http_server/node_modules/bytes/package.json b/scorpio-broker/http_server/node_modules/bytes/package.json new file mode 100644 index 0000000000000000000000000000000000000000..f2b6a8b0e3c9020746409617bcd562c9368be451 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/bytes/package.json @@ -0,0 +1,42 @@ +{ + "name": "bytes", + "description": "Utility to parse a string bytes to bytes and vice-versa", + "version": "3.1.2", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "contributors": [ + "Jed Watson ", + "Théo FIDRY " + ], + "license": "MIT", + "keywords": [ + "byte", + "bytes", + "utility", + "parse", + "parser", + "convert", + "converter" + ], + "repository": "visionmedia/bytes.js", + "devDependencies": { + "eslint": "7.32.0", + "eslint-plugin-markdown": "2.2.1", + "mocha": "9.2.0", + "nyc": "15.1.0" + }, + "files": [ + "History.md", + "LICENSE", + "Readme.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --check-leaks --reporter spec", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git 
a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.eslintrc b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.eslintrc new file mode 100644 index 0000000000000000000000000000000000000000..dfa9a6cdcf03e3f611cb91a59c68327d040a5d58 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.eslintrc @@ -0,0 +1,16 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "func-name-matching": 0, + "id-length": 0, + "new-cap": [2, { + "capIsNewExceptions": [ + "GetIntrinsic", + ], + }], + "no-magic-numbers": 0, + }, +} diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.github/FUNDING.yml b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.github/FUNDING.yml new file mode 100644 index 0000000000000000000000000000000000000000..0011e9d65fcaefeaccd7c79a0357e8f2214695ed --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/call-bind-apply-helpers +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.nycrc b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.nycrc new file mode 100644 index 0000000000000000000000000000000000000000..bdd626ce91477abbdd489b79988baebadbd3c897 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/CHANGELOG.md b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..cf630e88e11244249eb5b2d6a401367689266557 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/CHANGELOG.md @@ -0,0 +1,23 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v1.0.1](https://github.com/ljharb/call-bind-apply-helpers/compare/v1.0.0...v1.0.1) - 2024-12-08 + +### Commits + +- [types] `reflectApply`: fix types [`4efc396`](https://github.com/ljharb/call-bind-apply-helpers/commit/4efc3965351a4f02cc55e836fa391d3d11ef2ef8) +- [Fix] `reflectApply`: oops, Reflect is not a function [`83cc739`](https://github.com/ljharb/call-bind-apply-helpers/commit/83cc7395de6b79b7730bdf092f1436f0b1263c75) +- [Dev Deps] update `@arethetypeswrong/cli` [`80bd5d3`](https://github.com/ljharb/call-bind-apply-helpers/commit/80bd5d3ae58b4f6b6995ce439dd5a1bcb178a940) + +## v1.0.0 - 2024-12-05 + +### Commits + +- Initial implementation, tests, readme [`7879629`](https://github.com/ljharb/call-bind-apply-helpers/commit/78796290f9b7430c9934d6f33d94ae9bc89fce04) +- Initial commit [`3f1dc16`](https://github.com/ljharb/call-bind-apply-helpers/commit/3f1dc164afc43285631b114a5f9dd9137b2b952f) +- npm init [`081df04`](https://github.com/ljharb/call-bind-apply-helpers/commit/081df048c312fcee400922026f6e97281200a603) +- Only apps should have lockfiles [`5b9ca0f`](https://github.com/ljharb/call-bind-apply-helpers/commit/5b9ca0fe8101ebfaf309c549caac4e0a017ed930) diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/LICENSE b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f82f38963b0d282b529f64def2bcd40be3a9a947 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/README.md b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8fc0dae1b3f6b0a22322a21b40b0f51665eadfb0 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/README.md @@ -0,0 +1,62 @@ +# call-bind-apply-helpers [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +Helper functions around Function call/apply/bind, for use in `call-bind`. 
+ +The only packages that should likely ever use this package directly are `call-bind` and `get-intrinsic`. +Please use `call-bind` unless you have a very good reason not to. + +## Getting started + +```sh +npm install --save call-bind-apply-helpers +``` + +## Usage/Examples + +```js +const assert = require('assert'); +const callBindBasic = require('call-bind-apply-helpers'); + +function f(a, b) { + assert.equal(this, 1); + assert.equal(a, 2); + assert.equal(b, 3); + assert.equal(arguments.length, 2); +} + +const fBound = callBindBasic([f, 1]); + +delete Function.prototype.call; +delete Function.prototype.bind; + +fBound(2, 3); +``` + +## Tests + +Clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/call-bind-apply-helpers +[npm-version-svg]: https://versionbadg.es/ljharb/call-bind-apply-helpers.svg +[deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers.svg +[deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers +[dev-deps-svg]: https://david-dm.org/ljharb/call-bind-apply-helpers/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/call-bind-apply-helpers#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/call-bind-apply-helpers.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/call-bind-apply-helpers.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/call-bind-apply-helpers.svg +[downloads-url]: https://npm-stat.com/charts.html?package=call-bind-apply-helpers +[codecov-image]: https://codecov.io/gh/ljharb/call-bind-apply-helpers/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/ljharb/call-bind-apply-helpers/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bind-apply-helpers +[actions-url]: https://github.com/ljharb/call-bind-apply-helpers/actions diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/actualApply.d.ts b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/actualApply.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..b87286a21e3eda5e94932818c21e2bd20b30e905 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/actualApply.d.ts @@ -0,0 +1 @@ +export = Reflect.apply; \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/actualApply.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/actualApply.js new file mode 100644 index 0000000000000000000000000000000000000000..ffa51355dc7e5d3eb24f602f85f6ae57d087d310 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/actualApply.js @@ -0,0 +1,10 @@ +'use strict'; + +var bind = require('function-bind'); + +var $apply = require('./functionApply'); +var $call = require('./functionCall'); +var $reflectApply = require('./reflectApply'); + +/** @type {import('./actualApply')} */ +module.exports = $reflectApply || bind.call($call, $apply); diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/applyBind.d.ts b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/applyBind.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..d176c1ab30e5193071f5f6a6b6e8aaa4402476c6 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/applyBind.d.ts @@ -0,0 +1,19 @@ +import actualApply from './actualApply'; + +type TupleSplitHead = T['length'] extends N + ? 
T + : T extends [...infer R, any] + ? TupleSplitHead + : never + +type TupleSplitTail = O['length'] extends N + ? T + : T extends [infer F, ...infer R] + ? TupleSplitTail<[...R], N, [...O, F]> + : never + +type TupleSplit = [TupleSplitHead, TupleSplitTail] + +declare function applyBind(...args: TupleSplit, 2>[1]): ReturnType; + +export = applyBind; \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/applyBind.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/applyBind.js new file mode 100644 index 0000000000000000000000000000000000000000..d2b7723147450c8094f98d20d88a5170a4173bbb --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/applyBind.js @@ -0,0 +1,10 @@ +'use strict'; + +var bind = require('function-bind'); +var $apply = require('./functionApply'); +var actualApply = require('./actualApply'); + +/** @type {import('./applyBind')} */ +module.exports = function applyBind() { + return actualApply(bind, $apply, arguments); +}; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionApply.d.ts b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionApply.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..1f6e11b3d0e95337b67b81b962b4c6abe69ed7ae --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionApply.d.ts @@ -0,0 +1 @@ +export = Function.prototype.apply; \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionApply.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionApply.js new file mode 100644 index 0000000000000000000000000000000000000000..c71df9c2bcf07ca4340530058d0662661f29ec95 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionApply.js @@ -0,0 +1,4 @@ +'use strict'; + +/** @type {import('./functionApply')} */ +module.exports = Function.prototype.apply; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionCall.d.ts b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionCall.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..15e93df350fb6b8552c6ba89289966a8b998064e --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionCall.d.ts @@ -0,0 +1 @@ +export = Function.prototype.call; \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionCall.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionCall.js new file mode 100644 index 0000000000000000000000000000000000000000..7a8d8735752ef518698f1fe62b4edd16725898a8 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/functionCall.js @@ -0,0 +1,4 @@ +'use strict'; + +/** @type {import('./functionCall')} */ +module.exports = Function.prototype.call; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/index.d.ts b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/index.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..a7ae2c57c755e4723b8d728448e3183ce7f53b36 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/index.d.ts @@ -0,0 +1,46 @@ +type RemoveFromTuple< + Tuple extends unknown[], + RemoveCount extends number, + Index extends 1[] = [] +> = Index["length"] extends RemoveCount + ? 
Tuple + : Tuple extends [first: unknown, ...infer Rest] + ? RemoveFromTuple + : Tuple; + +type ConcatTuples< + Prefix extends unknown[], + Suffix extends unknown[] +> = [...Prefix, ...Suffix]; + +type ReplaceThis = T extends (this: infer OldThis, ...args: infer A) => infer R + ? (this: NewThis, ...args: A) => R + : never; + +type BindFunction< + TThis, + T extends (this: TThis, ...args: any[]) => any, // Allow specific types to propagate + TBoundArgs extends unknown[], + ReceiverBound extends boolean +> = ReceiverBound extends true + ? (...args: RemoveFromTuple, TBoundArgs["length"] & number>) => ReturnType> + : (...args: ConcatTuples<[TThis], RemoveFromTuple, TBoundArgs["length"] & number>>) => ReturnType; + +declare function callBind< + TThis, + T extends (this: TThis, ...args: any[]) => any, + TBoundArgs extends Partial> +>( + args: [fn: T, thisArg: TThis, ...boundArgs: TBoundArgs] +): BindFunction; + +declare function callBind< + TThis, + T extends (this: TThis, ...args: any[]) => any, + TBoundArgs extends Partial> +>( + args: [fn: T, ...boundArgs: TBoundArgs] +): BindFunction; + +export as namespace callBind; +export = callBind; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/index.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/index.js new file mode 100644 index 0000000000000000000000000000000000000000..8b6b99463c334dd73d50ced538b4c44d989adbd6 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/index.js @@ -0,0 +1,15 @@ +'use strict'; + +var bind = require('function-bind'); +var $TypeError = require('es-errors/type'); + +var $call = require('./functionCall'); +var $actualApply = require('./actualApply'); + +/** @type {import('.')} */ +module.exports = function callBindBasic(args) { + if (args.length < 1 || typeof args[0] !== 'function') { + throw new $TypeError('a function is required'); + } + return $actualApply(bind, $call, args); +}; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/package.json b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/package.json new file mode 100644 index 0000000000000000000000000000000000000000..7398be7043856a3a2147bdefaeb23da17cf9ae0a --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/package.json @@ -0,0 +1,85 @@ +{ + "name": "call-bind-apply-helpers", + "version": "1.0.1", + "description": "Helper functions around Function call/apply/bind, for use in `call-bind`", + "main": "index.js", + "exports": { + ".": "./index.js", + "./actualApply": "./actualApply.js", + "./applyBind": "./applyBind.js", + "./functionApply": "./functionApply.js", + "./functionCall": "./functionCall.js", + "./reflectApply": "./reflectApply.js", + "./package.json": "./package.json" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "prelint": "evalmd README.md", + "lint": "eslint --ext=.js,.mjs .", + "postlint": "tsc -p . 
&& attw -P", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "npx npm@'>=10.2' audit --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/call-bind-apply-helpers.git" + }, + "author": "Jordan Harband ", + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/call-bind-apply-helpers/issues" + }, + "homepage": "https://github.com/ljharb/call-bind-apply-helpers#readme", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.17.1", + "@ljharb/eslint-config": "^21.1.1", + "@ljharb/tsconfig": "^0.2.2", + "@types/for-each": "^0.3.3", + "@types/function-bind": "^1.1.10", + "@types/object-inspect": "^1.13.0", + "@types/tape": "^5.6.5", + "auto-changelog": "^2.5.0", + "encoding": "^0.1.13", + "es-value-fixtures": "^1.5.0", + "eslint": "=8.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "has-strict-mode": "^1.0.1", + "in-publish": "^2.0.1", + "npmignore": "^0.3.1", + "nyc": "^10.3.2", + "object-inspect": "^1.13.3", + "safe-publish-latest": "^2.0.0", + "tape": "^5.9.0", + "typescript": "next" + }, + "testling": { + "files": "test/index.js" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + }, + "engines": { + "node": ">= 0.4" + } +} diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/reflectApply.d.ts b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/reflectApply.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..6b2ae764c54541ae224d93bb49e8f176260601b6 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/reflectApply.d.ts @@ -0,0 +1,3 @@ +declare const reflectApply: false | typeof Reflect.apply; + +export = reflectApply; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/reflectApply.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/reflectApply.js new file mode 100644 index 0000000000000000000000000000000000000000..3d03caa695a5dd65a5aebbd2819621e757bda420 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/reflectApply.js @@ -0,0 +1,4 @@ +'use strict'; + +/** @type {import('./reflectApply')} */ +module.exports = typeof Reflect !== 'undefined' && Reflect && Reflect.apply; diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/test/index.js b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/test/index.js new file mode 100644 index 0000000000000000000000000000000000000000..8acc08a68f2a493d855ec1b4af72dab3371c8ab4 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/test/index.js @@ -0,0 +1,63 @@ +'use strict'; + +var callBind = require('../'); +var hasStrictMode = require('has-strict-mode')(); +var forEach = require('for-each'); +var inspect = require('object-inspect'); +var v = require('es-value-fixtures'); + +var test = require('tape'); + +test('callBindBasic', function (t) { + forEach(v.nonFunctions, function (nonFunction) { + t['throws']( + // 
@ts-expect-error + function () { callBind([nonFunction]); }, + TypeError, + inspect(nonFunction) + ' is not a function' + ); + }); + + var sentinel = { sentinel: true }; + /** @type {(this: T, a: number, b: number) => [T | undefined, number, number]} */ + var func = function (a, b) { + // eslint-disable-next-line no-invalid-this + return [!hasStrictMode && this === global ? undefined : this, a, b]; + }; + t.equal(func.length, 2, 'original function length is 2'); + + /** type {(thisArg: unknown, a: number, b: number) => [unknown, number, number]} */ + var bound = callBind([func]); + /** type {((a: number, b: number) => [sentinel, typeof a, typeof b])} */ + var boundR = callBind([func, sentinel]); + /** type {((b: number) => [sentinel, number, typeof b])} */ + var boundArg = callBind([func, sentinel, 1]); + + // @ts-expect-error + t.deepEqual(bound(), [undefined, undefined, undefined], 'bound func with no args'); + + // @ts-expect-error + t.deepEqual(func(), [undefined, undefined, undefined], 'unbound func with too few args'); + // @ts-expect-error + t.deepEqual(bound(1, 2), [hasStrictMode ? 1 : Object(1), 2, undefined], 'bound func too few args'); + // @ts-expect-error + t.deepEqual(boundR(), [sentinel, undefined, undefined], 'bound func with receiver, with too few args'); + // @ts-expect-error + t.deepEqual(boundArg(), [sentinel, 1, undefined], 'bound func with receiver and arg, with too few args'); + + t.deepEqual(func(1, 2), [undefined, 1, 2], 'unbound func with right args'); + t.deepEqual(bound(1, 2, 3), [hasStrictMode ? 1 : Object(1), 2, 3], 'bound func with right args'); + t.deepEqual(boundR(1, 2), [sentinel, 1, 2], 'bound func with receiver, with right args'); + t.deepEqual(boundArg(2), [sentinel, 1, 2], 'bound func with receiver and arg, with right arg'); + + // @ts-expect-error + t.deepEqual(func(1, 2, 3), [undefined, 1, 2], 'unbound func with too many args'); + // @ts-expect-error + t.deepEqual(bound(1, 2, 3, 4), [hasStrictMode ? 
1 : Object(1), 2, 3], 'bound func with too many args'); + // @ts-expect-error + t.deepEqual(boundR(1, 2, 3), [sentinel, 1, 2], 'bound func with receiver, with too many args'); + // @ts-expect-error + t.deepEqual(boundArg(2, 3), [sentinel, 1, 2], 'bound func with receiver and arg, with too many args'); + + t.end(); +}); diff --git a/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/tsconfig.json b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..aef9993084c3643257b6d6aea21f76017caf92a7 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bind-apply-helpers/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ljharb/tsconfig", + "compilerOptions": { + "target": "es2021", + }, + "exclude": [ + "coverage", + ], +} \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/call-bound/.eslintrc b/scorpio-broker/http_server/node_modules/call-bound/.eslintrc new file mode 100644 index 0000000000000000000000000000000000000000..2612ed8fefad45d3a68ae842a28e0353ebcb5cf1 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/.eslintrc @@ -0,0 +1,13 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "new-cap": [2, { + "capIsNewExceptions": [ + "GetIntrinsic", + ], + }], + }, +} diff --git a/scorpio-broker/http_server/node_modules/call-bound/.github/FUNDING.yml b/scorpio-broker/http_server/node_modules/call-bound/.github/FUNDING.yml new file mode 100644 index 0000000000000000000000000000000000000000..2a2a13571375d28b56577ad1f91bddabde753f33 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/call-bound +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/scorpio-broker/http_server/node_modules/call-bound/.nycrc b/scorpio-broker/http_server/node_modules/call-bound/.nycrc new file mode 100644 index 0000000000000000000000000000000000000000..bdd626ce91477abbdd489b79988baebadbd3c897 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/scorpio-broker/http_server/node_modules/call-bound/CHANGELOG.md b/scorpio-broker/http_server/node_modules/call-bound/CHANGELOG.md new file mode 100644 index 0000000000000000000000000000000000000000..25fa7a5e12b4729c97e89056f4d6f4d726a409ab --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/CHANGELOG.md @@ -0,0 +1,34 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v1.0.3](https://github.com/ljharb/call-bound/compare/v1.0.2...v1.0.3) - 2024-12-15 + +### Commits + +- [Refactor] use `call-bind-apply-helpers` instead of `call-bind` [`5e0b134`](https://github.com/ljharb/call-bound/commit/5e0b13496df14fb7d05dae9412f088da8d3f75be) +- [Deps] update `get-intrinsic` [`41fc967`](https://github.com/ljharb/call-bound/commit/41fc96732a22c7b7e8f381f93ccc54bb6293be2e) +- [readme] fix example [`79a0137`](https://github.com/ljharb/call-bound/commit/79a0137723f7c6d09c9c05452bbf8d5efb5d6e49) +- [meta] add `sideEffects` flag [`08b07be`](https://github.com/ljharb/call-bound/commit/08b07be7f1c03f67dc6f3cdaf0906259771859f7) + +## [v1.0.2](https://github.com/ljharb/call-bound/compare/v1.0.1...v1.0.2) - 2024-12-10 + +### Commits + +- [Dev Deps] update `@arethetypeswrong/cli`, `@ljharb/tsconfig`, `gopd` [`e6a5ffe`](https://github.com/ljharb/call-bound/commit/e6a5ffe849368fe4f74dfd6cdeca1b9baa39e8d5) +- [Deps] update `call-bind`, `get-intrinsic` [`2aeb5b5`](https://github.com/ljharb/call-bound/commit/2aeb5b521dc2b2683d1345c753ea1161de2d1c14) +- [types] improve return type [`1a0c9fe`](https://github.com/ljharb/call-bound/commit/1a0c9fe3114471e7ca1f57d104e2efe713bb4871) + +## v1.0.1 - 2024-12-05 + +### Commits + +- Initial implementation, tests, readme, types [`6d94121`](https://github.com/ljharb/call-bound/commit/6d94121a9243602e506334069f7a03189fe3363d) +- Initial commit [`0eae867`](https://github.com/ljharb/call-bound/commit/0eae867334ea025c33e6e91cdecfc9df96680cf9) +- npm init [`71b2479`](https://github.com/ljharb/call-bound/commit/71b2479c6723e0b7d91a6b663613067e98b7b275) +- Only apps should have lockfiles [`c3754a9`](https://github.com/ljharb/call-bound/commit/c3754a949b7f9132b47e2d18c1729889736741eb) +- [actions] skip `npm ls` in node < 10 [`74275a5`](https://github.com/ljharb/call-bound/commit/74275a5186b8caf6309b6b97472bdcb0df4683a8) +- [Dev Deps] add missing peer dep [`1354de8`](https://github.com/ljharb/call-bound/commit/1354de8679413e4ae9c523d85f76fa7a5e032d97) diff --git a/scorpio-broker/http_server/node_modules/call-bound/LICENSE b/scorpio-broker/http_server/node_modules/call-bound/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f82f38963b0d282b529f64def2bcd40be3a9a947 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Jordan Harband + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
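The package's `index.js` (shown further below) only call-binds intrinsic names containing `.prototype.`; static functions and non-function intrinsics are handed back unchanged. A brief sketch of that distinction, mirroring the bundled test file and assuming the vendored `call-bound` module:

```js
var callBound = require('call-bound');

// prototype methods come back pre-bound, so they are called as plain functions
var $toString = callBound('Object.prototype.toString');
console.log($toString(true)); // '[object Boolean]'

// static functions and non-function values are returned as-is
console.log(callBound('Date.parse') === Date.parse);           // true
console.log(callBound('Array.prototype') === Array.prototype); // true
```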
diff --git a/scorpio-broker/http_server/node_modules/call-bound/README.md b/scorpio-broker/http_server/node_modules/call-bound/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a44e43e56e07b9dce55d14eba09dcbbaf2024303 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/README.md @@ -0,0 +1,53 @@ +# call-bound [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][deps-svg]][deps-url] +[![dev dependency status][dev-deps-svg]][dev-deps-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +Robust call-bound JavaScript intrinsics, using `call-bind` and `get-intrinsic`. + +## Getting started + +```sh +npm install --save call-bound +``` + +## Usage/Examples + +```js +const assert = require('assert'); +const callBound = require('call-bound'); + +const slice = callBound('Array.prototype.slice'); + +delete Function.prototype.call; +delete Function.prototype.bind; +delete Array.prototype.slice; + +assert.deepEqual(slice([1, 2, 3, 4], 1, -1), [2, 3]); +``` + +## Tests + +Clone the repo, `npm install`, and run `npm test` + +[package-url]: https://npmjs.org/package/call-bound +[npm-version-svg]: https://versionbadg.es/ljharb/call-bound.svg +[deps-svg]: https://david-dm.org/ljharb/call-bound.svg +[deps-url]: https://david-dm.org/ljharb/call-bound +[dev-deps-svg]: https://david-dm.org/ljharb/call-bound/dev-status.svg +[dev-deps-url]: https://david-dm.org/ljharb/call-bound#info=devDependencies +[npm-badge-png]: https://nodei.co/npm/call-bound.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/call-bound.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/call-bound.svg +[downloads-url]: https://npm-stat.com/charts.html?package=call-bound +[codecov-image]: https://codecov.io/gh/ljharb/call-bound/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/ljharb/call-bound/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/ljharb/call-bound +[actions-url]: https://github.com/ljharb/call-bound/actions diff --git a/scorpio-broker/http_server/node_modules/call-bound/index.d.ts b/scorpio-broker/http_server/node_modules/call-bound/index.d.ts new file mode 100644 index 0000000000000000000000000000000000000000..e3d772ce58fe8da22206d3eaf123cdbc2c042172 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/index.d.ts @@ -0,0 +1,13 @@ +import callBind from 'call-bind-apply-helpers'; + +declare function callBoundIntrinsic( + name: string, + allowMissing?: false +): ReturnType; + +declare function callBoundIntrinsic( + name: string, + allowMissing: true +): undefined | ReturnType; + +export = callBoundIntrinsic; \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/call-bound/index.js b/scorpio-broker/http_server/node_modules/call-bound/index.js new file mode 100644 index 0000000000000000000000000000000000000000..3bb40121a79c9d6b885d628e50b0be2ce2614ec8 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/index.js @@ -0,0 +1,18 @@ +'use strict'; + +var GetIntrinsic = require('get-intrinsic'); + +var callBindBasic = require('call-bind-apply-helpers'); + +/** @type {(thisArg: string, searchString: string, position?: number) => number} */ +var $indexOf = 
callBindBasic([GetIntrinsic('%String.prototype.indexOf%')]); + +/** @type {import('.')} */ +module.exports = function callBoundIntrinsic(name, allowMissing) { + // eslint-disable-next-line no-extra-parens + var intrinsic = /** @type {Parameters[0][0]} */ (GetIntrinsic(name, !!allowMissing)); + if (typeof intrinsic === 'function' && $indexOf(name, '.prototype.') > -1) { + return callBindBasic([intrinsic]); + } + return intrinsic; +}; diff --git a/scorpio-broker/http_server/node_modules/call-bound/package.json b/scorpio-broker/http_server/node_modules/call-bound/package.json new file mode 100644 index 0000000000000000000000000000000000000000..2893ed11ab2cdd2a6ebf7109404d4968bace4b64 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/package.json @@ -0,0 +1,99 @@ +{ + "name": "call-bound", + "version": "1.0.3", + "description": "Robust call-bound JavaScript intrinsics, using `call-bind` and `get-intrinsic`.", + "main": "index.js", + "exports": { + ".": "./index.js", + "./package.json": "./package.json" + }, + "sideEffects": false, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "prelint": "evalmd README.md", + "lint": "eslint --ext=.js,.mjs .", + "postlint": "tsc -p . && attw -P", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "npx npm@'>=10.2' audit --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/ljharb/call-bound.git" + }, + "keywords": [ + "javascript", + "ecmascript", + "es", + "js", + "callbind", + "callbound", + "call", + "bind", + "bound", + "call-bind", + "call-bound", + "function", + "es-abstract" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/ljharb/call-bound/issues" + }, + "homepage": "https://github.com/ljharb/call-bound#readme", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "devDependencies": { + "@arethetypeswrong/cli": "^0.17.1", + "@ljharb/eslint-config": "^21.1.1", + "@ljharb/tsconfig": "^0.2.2", + "@types/call-bind": "^1.0.5", + "@types/get-intrinsic": "^1.2.3", + "@types/tape": "^5.6.5", + "auto-changelog": "^2.5.0", + "encoding": "^0.1.13", + "es-value-fixtures": "^1.5.0", + "eslint": "=8.8.0", + "evalmd": "^0.0.19", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-strict-mode": "^1.0.1", + "in-publish": "^2.0.1", + "npmignore": "^0.3.1", + "nyc": "^10.3.2", + "object-inspect": "^1.13.3", + "safe-publish-latest": "^2.0.0", + "tape": "^5.9.0", + "typescript": "next" + }, + "testling": { + "files": "test/index.js" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + }, + "engines": { + "node": ">= 0.4" + } +} diff --git a/scorpio-broker/http_server/node_modules/call-bound/test/index.js b/scorpio-broker/http_server/node_modules/call-bound/test/index.js new file mode 100644 index 
0000000000000000000000000000000000000000..36f5f0b973382e2c32d03afef1dd0187dee4a1aa --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/test/index.js @@ -0,0 +1,54 @@ +'use strict'; + +var test = require('tape'); + +var callBound = require('../'); + +test('callBound', function (t) { + // static primitive + t.equal(callBound('Array.length'), Array.length, 'Array.length yields itself'); + t.equal(callBound('%Array.length%'), Array.length, '%Array.length% yields itself'); + + // static non-function object + t.equal(callBound('Array.prototype'), Array.prototype, 'Array.prototype yields itself'); + t.equal(callBound('%Array.prototype%'), Array.prototype, '%Array.prototype% yields itself'); + t.equal(callBound('Array.constructor'), Array.constructor, 'Array.constructor yields itself'); + t.equal(callBound('%Array.constructor%'), Array.constructor, '%Array.constructor% yields itself'); + + // static function + t.equal(callBound('Date.parse'), Date.parse, 'Date.parse yields itself'); + t.equal(callBound('%Date.parse%'), Date.parse, '%Date.parse% yields itself'); + + // prototype primitive + t.equal(callBound('Error.prototype.message'), Error.prototype.message, 'Error.prototype.message yields itself'); + t.equal(callBound('%Error.prototype.message%'), Error.prototype.message, '%Error.prototype.message% yields itself'); + + // prototype function + t.notEqual(callBound('Object.prototype.toString'), Object.prototype.toString, 'Object.prototype.toString does not yield itself'); + t.notEqual(callBound('%Object.prototype.toString%'), Object.prototype.toString, '%Object.prototype.toString% does not yield itself'); + t.equal(callBound('Object.prototype.toString')(true), Object.prototype.toString.call(true), 'call-bound Object.prototype.toString calls into the original'); + t.equal(callBound('%Object.prototype.toString%')(true), Object.prototype.toString.call(true), 'call-bound %Object.prototype.toString% calls into the original'); + + t['throws']( + function () { callBound('does not exist'); }, + SyntaxError, + 'nonexistent intrinsic throws' + ); + t['throws']( + function () { callBound('does not exist', true); }, + SyntaxError, + 'allowMissing arg still throws for unknown intrinsic' + ); + + t.test('real but absent intrinsic', { skip: typeof WeakRef !== 'undefined' }, function (st) { + st['throws']( + function () { callBound('WeakRef'); }, + TypeError, + 'real but absent intrinsic throws' + ); + st.equal(callBound('WeakRef', true), undefined, 'allowMissing arg avoids exception'); + st.end(); + }); + + t.end(); +}); diff --git a/scorpio-broker/http_server/node_modules/call-bound/tsconfig.json b/scorpio-broker/http_server/node_modules/call-bound/tsconfig.json new file mode 100644 index 0000000000000000000000000000000000000000..d9a6668c3ae6be748c78fbd5694053706b3d6942 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/call-bound/tsconfig.json @@ -0,0 +1,9 @@ +{ + "extends": "@ljharb/tsconfig", + "compilerOptions": { + "target": "es2021", + }, + "exclude": [ + "coverage", + ], +} diff --git a/scorpio-broker/http_server/node_modules/content-disposition/HISTORY.md b/scorpio-broker/http_server/node_modules/content-disposition/HISTORY.md new file mode 100644 index 0000000000000000000000000000000000000000..488effa0c9440f4e214102980665781a62ba7059 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-disposition/HISTORY.md @@ -0,0 +1,60 @@ +0.5.4 / 2021-12-10 +================== + + * deps: safe-buffer@5.2.1 + +0.5.3 / 2018-12-17 +================== + + * Use `safe-buffer` 
for improved Buffer API + +0.5.2 / 2016-12-08 +================== + + * Fix `parse` to accept any linear whitespace character + +0.5.1 / 2016-01-17 +================== + + * perf: enable strict mode + +0.5.0 / 2014-10-11 +================== + + * Add `parse` function + +0.4.0 / 2014-09-21 +================== + + * Expand non-Unicode `filename` to the full ISO-8859-1 charset + +0.3.0 / 2014-09-20 +================== + + * Add `fallback` option + * Add `type` option + +0.2.0 / 2014-09-19 +================== + + * Reduce ambiguity of file names with hex escape in buggy browsers + +0.1.2 / 2014-09-19 +================== + + * Fix periodic invalid Unicode filename header + +0.1.1 / 2014-09-19 +================== + + * Fix invalid characters appearing in `filename*` parameter + +0.1.0 / 2014-09-18 +================== + + * Make the `filename` argument optional + +0.0.0 / 2014-09-18 +================== + + * Initial release diff --git a/scorpio-broker/http_server/node_modules/content-disposition/LICENSE b/scorpio-broker/http_server/node_modules/content-disposition/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..84441fbb5709262c2bfc9b5ff0166ad4f024a1b8 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-disposition/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/scorpio-broker/http_server/node_modules/content-disposition/README.md b/scorpio-broker/http_server/node_modules/content-disposition/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3a0bb055949cdaed008f0f85e111624214213873 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-disposition/README.md @@ -0,0 +1,142 @@ +# content-disposition + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][github-actions-ci-image]][github-actions-ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Create and parse HTTP `Content-Disposition` header + +## Installation + +```sh +$ npm install content-disposition +``` + +## API + +```js +var contentDisposition = require('content-disposition') +``` + +### contentDisposition(filename, options) + +Create an attachment `Content-Disposition` header value using the given file name, +if supplied. The `filename` is optional and if no file name is desired, but you +want to specify `options`, set `filename` to `undefined`. 
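+
+A brief sketch (added for illustration, not from the upstream README) of that calling convention; the example inputs are arbitrary and the shown return values assume the behavior of the bundled implementation:
+
+```js
+// options only, no file name
+contentDisposition(undefined, { type: 'inline' })
+// => 'inline'
+
+// non-ISO-8859-1 file name with an explicit ISO-8859-1 fallback
+contentDisposition('€ rates.txt', { fallback: 'EURO rates.txt' })
+// => 'attachment; filename="EURO rates.txt"; filename*=UTF-8\'\'%E2%82%AC%20rates.txt'
+```
+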
+ +```js +res.setHeader('Content-Disposition', contentDisposition('∫ maths.pdf')) +``` + +**note** HTTP headers are of the ISO-8859-1 character set. If you are writing this +header through a means different from `setHeader` in Node.js, you'll want to specify +the `'binary'` encoding in Node.js. + +#### Options + +`contentDisposition` accepts these properties in the options object. + +##### fallback + +If the `filename` option is outside ISO-8859-1, then the file name is actually +stored in a supplemental field for clients that support Unicode file names and +a ISO-8859-1 version of the file name is automatically generated. + +This specifies the ISO-8859-1 file name to override the automatic generation or +disables the generation all together, defaults to `true`. + + - A string will specify the ISO-8859-1 file name to use in place of automatic + generation. + - `false` will disable including a ISO-8859-1 file name and only include the + Unicode version (unless the file name is already ISO-8859-1). + - `true` will enable automatic generation if the file name is outside ISO-8859-1. + +If the `filename` option is ISO-8859-1 and this option is specified and has a +different value, then the `filename` option is encoded in the extended field +and this set as the fallback field, even though they are both ISO-8859-1. + +##### type + +Specifies the disposition type, defaults to `"attachment"`. This can also be +`"inline"`, or any other value (all values except inline are treated like +`attachment`, but can convey additional information if both parties agree to +it). The type is normalized to lower-case. + +### contentDisposition.parse(string) + +```js +var disposition = contentDisposition.parse('attachment; filename="EURO rates.txt"; filename*=UTF-8\'\'%e2%82%ac%20rates.txt') +``` + +Parse a `Content-Disposition` header string. This automatically handles extended +("Unicode") parameters by decoding them and providing them under the standard +parameter name. This will return an object with the following properties (examples +are shown for the string `'attachment; filename="EURO rates.txt"; filename*=UTF-8\'\'%e2%82%ac%20rates.txt'`): + + - `type`: The disposition type (always lower case). Example: `'attachment'` + + - `parameters`: An object of the parameters in the disposition (name of parameter + always lower case and extended versions replace non-extended versions). 
Example: + `{filename: "€ rates.txt"}` + +## Examples + +### Send a file for download + +```js +var contentDisposition = require('content-disposition') +var destroy = require('destroy') +var fs = require('fs') +var http = require('http') +var onFinished = require('on-finished') + +var filePath = '/path/to/public/plans.pdf' + +http.createServer(function onRequest (req, res) { + // set headers + res.setHeader('Content-Type', 'application/pdf') + res.setHeader('Content-Disposition', contentDisposition(filePath)) + + // send file + var stream = fs.createReadStream(filePath) + stream.pipe(res) + onFinished(res, function () { + destroy(stream) + }) +}) +``` + +## Testing + +```sh +$ npm test +``` + +## References + +- [RFC 2616: Hypertext Transfer Protocol -- HTTP/1.1][rfc-2616] +- [RFC 5987: Character Set and Language Encoding for Hypertext Transfer Protocol (HTTP) Header Field Parameters][rfc-5987] +- [RFC 6266: Use of the Content-Disposition Header Field in the Hypertext Transfer Protocol (HTTP)][rfc-6266] +- [Test Cases for HTTP Content-Disposition header field (RFC 6266) and the Encodings defined in RFCs 2047, 2231 and 5987][tc-2231] + +[rfc-2616]: https://tools.ietf.org/html/rfc2616 +[rfc-5987]: https://tools.ietf.org/html/rfc5987 +[rfc-6266]: https://tools.ietf.org/html/rfc6266 +[tc-2231]: http://greenbytes.de/tech/tc2231/ + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/content-disposition.svg +[npm-url]: https://npmjs.org/package/content-disposition +[node-version-image]: https://img.shields.io/node/v/content-disposition.svg +[node-version-url]: https://nodejs.org/en/download +[coveralls-image]: https://img.shields.io/coveralls/jshttp/content-disposition.svg +[coveralls-url]: https://coveralls.io/r/jshttp/content-disposition?branch=master +[downloads-image]: https://img.shields.io/npm/dm/content-disposition.svg +[downloads-url]: https://npmjs.org/package/content-disposition +[github-actions-ci-image]: https://img.shields.io/github/workflow/status/jshttp/content-disposition/ci/master?label=ci +[github-actions-ci-url]: https://github.com/jshttp/content-disposition?query=workflow%3Aci diff --git a/scorpio-broker/http_server/node_modules/content-disposition/index.js b/scorpio-broker/http_server/node_modules/content-disposition/index.js new file mode 100644 index 0000000000000000000000000000000000000000..ecec899a992d46f2e903a87475b1c342f2ce4d30 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-disposition/index.js @@ -0,0 +1,458 @@ +/*! + * content-disposition + * Copyright(c) 2014-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = contentDisposition +module.exports.parse = parse + +/** + * Module dependencies. + * @private + */ + +var basename = require('path').basename +var Buffer = require('safe-buffer').Buffer + +/** + * RegExp to match non attr-char, *after* encodeURIComponent (i.e. not including "%") + * @private + */ + +var ENCODE_URL_ATTR_CHAR_REGEXP = /[\x00-\x20"'()*,/:;<=>?@[\\\]{}\x7f]/g // eslint-disable-line no-control-regex + +/** + * RegExp to match percent encoding escape. + * @private + */ + +var HEX_ESCAPE_REGEXP = /%[0-9A-Fa-f]{2}/ +var HEX_ESCAPE_REPLACE_REGEXP = /%([0-9A-Fa-f]{2})/g + +/** + * RegExp to match non-latin1 characters. 
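+ * (used to validate the fallback option and, in getlatin1() below, to replace such characters with "?")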
+ * @private + */ + +var NON_LATIN1_REGEXP = /[^\x20-\x7e\xa0-\xff]/g + +/** + * RegExp to match quoted-pair in RFC 2616 + * + * quoted-pair = "\" CHAR + * CHAR = + * @private + */ + +var QESC_REGEXP = /\\([\u0000-\u007f])/g // eslint-disable-line no-control-regex + +/** + * RegExp to match chars that must be quoted-pair in RFC 2616 + * @private + */ + +var QUOTE_REGEXP = /([\\"])/g + +/** + * RegExp for various RFC 2616 grammar + * + * parameter = token "=" ( token | quoted-string ) + * token = 1* + * separators = "(" | ")" | "<" | ">" | "@" + * | "," | ";" | ":" | "\" | <"> + * | "/" | "[" | "]" | "?" | "=" + * | "{" | "}" | SP | HT + * quoted-string = ( <"> *(qdtext | quoted-pair ) <"> ) + * qdtext = > + * quoted-pair = "\" CHAR + * CHAR = + * TEXT = + * LWS = [CRLF] 1*( SP | HT ) + * CRLF = CR LF + * CR = + * LF = + * SP = + * HT = + * CTL = + * OCTET = + * @private + */ + +var PARAM_REGEXP = /;[\x09\x20]*([!#$%&'*+.0-9A-Z^_`a-z|~-]+)[\x09\x20]*=[\x09\x20]*("(?:[\x20!\x23-\x5b\x5d-\x7e\x80-\xff]|\\[\x20-\x7e])*"|[!#$%&'*+.0-9A-Z^_`a-z|~-]+)[\x09\x20]*/g // eslint-disable-line no-control-regex +var TEXT_REGEXP = /^[\x20-\x7e\x80-\xff]+$/ +var TOKEN_REGEXP = /^[!#$%&'*+.0-9A-Z^_`a-z|~-]+$/ + +/** + * RegExp for various RFC 5987 grammar + * + * ext-value = charset "'" [ language ] "'" value-chars + * charset = "UTF-8" / "ISO-8859-1" / mime-charset + * mime-charset = 1*mime-charsetc + * mime-charsetc = ALPHA / DIGIT + * / "!" / "#" / "$" / "%" / "&" + * / "+" / "-" / "^" / "_" / "`" + * / "{" / "}" / "~" + * language = ( 2*3ALPHA [ extlang ] ) + * / 4ALPHA + * / 5*8ALPHA + * extlang = *3( "-" 3ALPHA ) + * value-chars = *( pct-encoded / attr-char ) + * pct-encoded = "%" HEXDIG HEXDIG + * attr-char = ALPHA / DIGIT + * / "!" / "#" / "$" / "&" / "+" / "-" / "." + * / "^" / "_" / "`" / "|" / "~" + * @private + */ + +var EXT_VALUE_REGEXP = /^([A-Za-z0-9!#$%&+\-^_`{}~]+)'(?:[A-Za-z]{2,3}(?:-[A-Za-z]{3}){0,3}|[A-Za-z]{4,8}|)'((?:%[0-9A-Fa-f]{2}|[A-Za-z0-9!#$&+.^_`|~-])+)$/ + +/** + * RegExp for various RFC 6266 grammar + * + * disposition-type = "inline" | "attachment" | disp-ext-type + * disp-ext-type = token + * disposition-parm = filename-parm | disp-ext-parm + * filename-parm = "filename" "=" value + * | "filename*" "=" ext-value + * disp-ext-parm = token "=" value + * | ext-token "=" ext-value + * ext-token = + * @private + */ + +var DISPOSITION_TYPE_REGEXP = /^([!#$%&'*+.0-9A-Z^_`a-z|~-]+)[\x09\x20]*(?:$|;)/ // eslint-disable-line no-control-regex + +/** + * Create an attachment Content-Disposition header. + * + * @param {string} [filename] + * @param {object} [options] + * @param {string} [options.type=attachment] + * @param {string|boolean} [options.fallback=true] + * @return {string} + * @public + */ + +function contentDisposition (filename, options) { + var opts = options || {} + + // get type + var type = opts.type || 'attachment' + + // get parameters + var params = createparams(filename, opts.fallback) + + // format into string + return format(new ContentDisposition(type, params)) +} + +/** + * Create parameters object from filename and fallback. 
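+ * Illustrative example: createparams('€ rates.txt', true) returns { 'filename*': '€ rates.txt', filename: '? rates.txt' }.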
+ * + * @param {string} [filename] + * @param {string|boolean} [fallback=true] + * @return {object} + * @private + */ + +function createparams (filename, fallback) { + if (filename === undefined) { + return + } + + var params = {} + + if (typeof filename !== 'string') { + throw new TypeError('filename must be a string') + } + + // fallback defaults to true + if (fallback === undefined) { + fallback = true + } + + if (typeof fallback !== 'string' && typeof fallback !== 'boolean') { + throw new TypeError('fallback must be a string or boolean') + } + + if (typeof fallback === 'string' && NON_LATIN1_REGEXP.test(fallback)) { + throw new TypeError('fallback must be ISO-8859-1 string') + } + + // restrict to file base name + var name = basename(filename) + + // determine if name is suitable for quoted string + var isQuotedString = TEXT_REGEXP.test(name) + + // generate fallback name + var fallbackName = typeof fallback !== 'string' + ? fallback && getlatin1(name) + : basename(fallback) + var hasFallback = typeof fallbackName === 'string' && fallbackName !== name + + // set extended filename parameter + if (hasFallback || !isQuotedString || HEX_ESCAPE_REGEXP.test(name)) { + params['filename*'] = name + } + + // set filename parameter + if (isQuotedString || hasFallback) { + params.filename = hasFallback + ? fallbackName + : name + } + + return params +} + +/** + * Format object to Content-Disposition header. + * + * @param {object} obj + * @param {string} obj.type + * @param {object} [obj.parameters] + * @return {string} + * @private + */ + +function format (obj) { + var parameters = obj.parameters + var type = obj.type + + if (!type || typeof type !== 'string' || !TOKEN_REGEXP.test(type)) { + throw new TypeError('invalid type') + } + + // start with normalized type + var string = String(type).toLowerCase() + + // append parameters + if (parameters && typeof parameters === 'object') { + var param + var params = Object.keys(parameters).sort() + + for (var i = 0; i < params.length; i++) { + param = params[i] + + var val = param.substr(-1) === '*' + ? ustring(parameters[param]) + : qstring(parameters[param]) + + string += '; ' + param + '=' + val + } + } + + return string +} + +/** + * Decode a RFC 5987 field value (gracefully). + * + * @param {string} str + * @return {string} + * @private + */ + +function decodefield (str) { + var match = EXT_VALUE_REGEXP.exec(str) + + if (!match) { + throw new TypeError('invalid extended field value') + } + + var charset = match[1].toLowerCase() + var encoded = match[2] + var value + + // to binary string + var binary = encoded.replace(HEX_ESCAPE_REPLACE_REGEXP, pdecode) + + switch (charset) { + case 'iso-8859-1': + value = getlatin1(binary) + break + case 'utf-8': + value = Buffer.from(binary, 'binary').toString('utf8') + break + default: + throw new TypeError('unsupported charset in extended field') + } + + return value +} + +/** + * Get ISO-8859-1 version of string. + * + * @param {string} val + * @return {string} + * @private + */ + +function getlatin1 (val) { + // simple Unicode -> ISO-8859-1 transformation + return String(val).replace(NON_LATIN1_REGEXP, '?') +} + +/** + * Parse Content-Disposition header string. 
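+ * Illustrative example: parse('attachment; filename="plans.pdf"') yields type 'attachment' and parameters { filename: 'plans.pdf' }.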
+ * + * @param {string} string + * @return {object} + * @public + */ + +function parse (string) { + if (!string || typeof string !== 'string') { + throw new TypeError('argument string is required') + } + + var match = DISPOSITION_TYPE_REGEXP.exec(string) + + if (!match) { + throw new TypeError('invalid type format') + } + + // normalize type + var index = match[0].length + var type = match[1].toLowerCase() + + var key + var names = [] + var params = {} + var value + + // calculate index to start at + index = PARAM_REGEXP.lastIndex = match[0].substr(-1) === ';' + ? index - 1 + : index + + // match parameters + while ((match = PARAM_REGEXP.exec(string))) { + if (match.index !== index) { + throw new TypeError('invalid parameter format') + } + + index += match[0].length + key = match[1].toLowerCase() + value = match[2] + + if (names.indexOf(key) !== -1) { + throw new TypeError('invalid duplicate parameter') + } + + names.push(key) + + if (key.indexOf('*') + 1 === key.length) { + // decode extended value + key = key.slice(0, -1) + value = decodefield(value) + + // overwrite existing value + params[key] = value + continue + } + + if (typeof params[key] === 'string') { + continue + } + + if (value[0] === '"') { + // remove quotes and escapes + value = value + .substr(1, value.length - 2) + .replace(QESC_REGEXP, '$1') + } + + params[key] = value + } + + if (index !== -1 && index !== string.length) { + throw new TypeError('invalid parameter format') + } + + return new ContentDisposition(type, params) +} + +/** + * Percent decode a single character. + * + * @param {string} str + * @param {string} hex + * @return {string} + * @private + */ + +function pdecode (str, hex) { + return String.fromCharCode(parseInt(hex, 16)) +} + +/** + * Percent encode a single character. + * + * @param {string} char + * @return {string} + * @private + */ + +function pencode (char) { + return '%' + String(char) + .charCodeAt(0) + .toString(16) + .toUpperCase() +} + +/** + * Quote a string for HTTP. + * + * @param {string} val + * @return {string} + * @private + */ + +function qstring (val) { + var str = String(val) + + return '"' + str.replace(QUOTE_REGEXP, '\\$1') + '"' +} + +/** + * Encode a Unicode string for HTTP (RFC 5987). 
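+ * Illustrative example: ustring('€ rates.txt') returns "UTF-8''%E2%82%AC%20rates.txt".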
+ * + * @param {string} val + * @return {string} + * @private + */ + +function ustring (val) { + var str = String(val) + + // percent encode as UTF-8 + var encoded = encodeURIComponent(str) + .replace(ENCODE_URL_ATTR_CHAR_REGEXP, pencode) + + return 'UTF-8\'\'' + encoded +} + +/** + * Class for parsed Content-Disposition header for v8 optimization + * + * @public + * @param {string} type + * @param {object} parameters + * @constructor + */ + +function ContentDisposition (type, parameters) { + this.type = type + this.parameters = parameters +} diff --git a/scorpio-broker/http_server/node_modules/content-disposition/package.json b/scorpio-broker/http_server/node_modules/content-disposition/package.json new file mode 100644 index 0000000000000000000000000000000000000000..43c70ce24a45a9a8f9eec7c6b6a30e0324d3078d --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-disposition/package.json @@ -0,0 +1,44 @@ +{ + "name": "content-disposition", + "description": "Create and parse Content-Disposition header", + "version": "0.5.4", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "content-disposition", + "http", + "rfc6266", + "res" + ], + "repository": "jshttp/content-disposition", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "7.32.0", + "eslint-config-standard": "13.0.1", + "eslint-plugin-import": "2.25.3", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.2.0", + "eslint-plugin-standard": "4.1.0", + "istanbul": "0.4.5", + "mocha": "9.1.3" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/" + } +} diff --git a/scorpio-broker/http_server/node_modules/content-type/HISTORY.md b/scorpio-broker/http_server/node_modules/content-type/HISTORY.md new file mode 100644 index 0000000000000000000000000000000000000000..458367139eb9f0af3daa5449ff0a3d9e2e189582 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-type/HISTORY.md @@ -0,0 +1,29 @@ +1.0.5 / 2023-01-29 +================== + + * perf: skip value escaping when unnecessary + +1.0.4 / 2017-09-11 +================== + + * perf: skip parameter parsing when no parameters + +1.0.3 / 2017-09-10 +================== + + * perf: remove argument reassignment + +1.0.2 / 2016-05-09 +================== + + * perf: enable strict mode + +1.0.1 / 2015-02-13 +================== + + * Improve missing `Content-Type` header error message + +1.0.0 / 2015-02-01 +================== + + * Initial implementation, derived from `media-typer@0.3.0` diff --git a/scorpio-broker/http_server/node_modules/content-type/LICENSE b/scorpio-broker/http_server/node_modules/content-type/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..34b1a2de37216b60b749c23b6f894e51d701ecf0 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-type/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software 
without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/scorpio-broker/http_server/node_modules/content-type/README.md b/scorpio-broker/http_server/node_modules/content-type/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c1a922a9afba84293f449dc4b661124fbac2fd5d --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-type/README.md @@ -0,0 +1,94 @@ +# content-type + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +Create and parse HTTP Content-Type header according to RFC 7231 + +## Installation + +```sh +$ npm install content-type +``` + +## API + +```js +var contentType = require('content-type') +``` + +### contentType.parse(string) + +```js +var obj = contentType.parse('image/svg+xml; charset=utf-8') +``` + +Parse a `Content-Type` header. This will return an object with the following +properties (examples are shown for the string `'image/svg+xml; charset=utf-8'`): + + - `type`: The media type (the type and subtype, always lower case). + Example: `'image/svg+xml'` + + - `parameters`: An object of the parameters in the media type (name of parameter + always lower case). Example: `{charset: 'utf-8'}` + +Throws a `TypeError` if the string is missing or invalid. + +### contentType.parse(req) + +```js +var obj = contentType.parse(req) +``` + +Parse the `Content-Type` header from the given `req`. Short-cut for +`contentType.parse(req.headers['content-type'])`. + +Throws a `TypeError` if the `Content-Type` header is missing or invalid. + +### contentType.parse(res) + +```js +var obj = contentType.parse(res) +``` + +Parse the `Content-Type` header set on the given `res`. Short-cut for +`contentType.parse(res.getHeader('content-type'))`. + +Throws a `TypeError` if the `Content-Type` header is missing or invalid. + +### contentType.format(obj) + +```js +var str = contentType.format({ + type: 'image/svg+xml', + parameters: { charset: 'utf-8' } +}) +``` + +Format an object into a `Content-Type` header. This will return a string of the +content type for the given object with the following properties (examples are +shown that produce the string `'image/svg+xml; charset=utf-8'`): + + - `type`: The media type (will be lower-cased). Example: `'image/svg+xml'` + + - `parameters`: An object of the parameters in the media type (name of the + parameter will be lower-cased). Example: `{charset: 'utf-8'}` + +Throws a `TypeError` if the object contains an invalid type or parameter names. 
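+
+As an added, hedged illustration (not part of the upstream README), `parse` and `format` can be used together to rewrite a parameter; the header values here are arbitrary:
+
+```js
+var contentType = require('content-type')
+
+// parse a header, adjust a parameter, and re-serialize it
+var obj = contentType.parse('text/html; charset=UTF-8')
+obj.parameters.charset = 'iso-8859-1'
+var str = contentType.format(obj)
+// => 'text/html; charset=iso-8859-1'
+```
+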
+ +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/content-type/master?label=ci +[ci-url]: https://github.com/jshttp/content-type/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/content-type/master +[coveralls-url]: https://coveralls.io/r/jshttp/content-type?branch=master +[node-image]: https://badgen.net/npm/node/content-type +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/content-type +[npm-url]: https://npmjs.org/package/content-type +[npm-version-image]: https://badgen.net/npm/v/content-type diff --git a/scorpio-broker/http_server/node_modules/content-type/index.js b/scorpio-broker/http_server/node_modules/content-type/index.js new file mode 100644 index 0000000000000000000000000000000000000000..41840e7bc3e48cda894597cd18e562a37a174f7c --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-type/index.js @@ -0,0 +1,225 @@ +/*! + * content-type + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * RegExp to match *( ";" parameter ) in RFC 7231 sec 3.1.1.1 + * + * parameter = token "=" ( token / quoted-string ) + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + * / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + * / DIGIT / ALPHA + * ; any VCHAR, except delimiters + * quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + * qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + * obs-text = %x80-FF + * quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + */ +var PARAM_REGEXP = /; *([!#$%&'*+.^_`|~0-9A-Za-z-]+) *= *("(?:[\u000b\u0020\u0021\u0023-\u005b\u005d-\u007e\u0080-\u00ff]|\\[\u000b\u0020-\u00ff])*"|[!#$%&'*+.^_`|~0-9A-Za-z-]+) */g // eslint-disable-line no-control-regex +var TEXT_REGEXP = /^[\u000b\u0020-\u007e\u0080-\u00ff]+$/ // eslint-disable-line no-control-regex +var TOKEN_REGEXP = /^[!#$%&'*+.^_`|~0-9A-Za-z-]+$/ + +/** + * RegExp to match quoted-pair in RFC 7230 sec 3.2.6 + * + * quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + * obs-text = %x80-FF + */ +var QESC_REGEXP = /\\([\u000b\u0020-\u00ff])/g // eslint-disable-line no-control-regex + +/** + * RegExp to match chars that must be quoted-pair in RFC 7230 sec 3.2.6 + */ +var QUOTE_REGEXP = /([\\"])/g + +/** + * RegExp to match type in RFC 7231 sec 3.1.1.1 + * + * media-type = type "/" subtype + * type = token + * subtype = token + */ +var TYPE_REGEXP = /^[!#$%&'*+.^_`|~0-9A-Za-z-]+\/[!#$%&'*+.^_`|~0-9A-Za-z-]+$/ + +/** + * Module exports. + * @public + */ + +exports.format = format +exports.parse = parse + +/** + * Format object to media type. + * + * @param {object} obj + * @return {string} + * @public + */ + +function format (obj) { + if (!obj || typeof obj !== 'object') { + throw new TypeError('argument obj is required') + } + + var parameters = obj.parameters + var type = obj.type + + if (!type || !TYPE_REGEXP.test(type)) { + throw new TypeError('invalid type') + } + + var string = type + + // append parameters + if (parameters && typeof parameters === 'object') { + var param + var params = Object.keys(parameters).sort() + + for (var i = 0; i < params.length; i++) { + param = params[i] + + if (!TOKEN_REGEXP.test(param)) { + throw new TypeError('invalid parameter name') + } + + string += '; ' + param + '=' + qstring(parameters[param]) + } + } + + return string +} + +/** + * Parse media type to object. 
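+ * Illustrative example: parse('image/svg+xml; charset=utf-8') yields type 'image/svg+xml' and parameters { charset: 'utf-8' }.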
+ * + * @param {string|object} string + * @return {Object} + * @public + */ + +function parse (string) { + if (!string) { + throw new TypeError('argument string is required') + } + + // support req/res-like objects as argument + var header = typeof string === 'object' + ? getcontenttype(string) + : string + + if (typeof header !== 'string') { + throw new TypeError('argument string is required to be a string') + } + + var index = header.indexOf(';') + var type = index !== -1 + ? header.slice(0, index).trim() + : header.trim() + + if (!TYPE_REGEXP.test(type)) { + throw new TypeError('invalid media type') + } + + var obj = new ContentType(type.toLowerCase()) + + // parse parameters + if (index !== -1) { + var key + var match + var value + + PARAM_REGEXP.lastIndex = index + + while ((match = PARAM_REGEXP.exec(header))) { + if (match.index !== index) { + throw new TypeError('invalid parameter format') + } + + index += match[0].length + key = match[1].toLowerCase() + value = match[2] + + if (value.charCodeAt(0) === 0x22 /* " */) { + // remove quotes + value = value.slice(1, -1) + + // remove escapes + if (value.indexOf('\\') !== -1) { + value = value.replace(QESC_REGEXP, '$1') + } + } + + obj.parameters[key] = value + } + + if (index !== header.length) { + throw new TypeError('invalid parameter format') + } + } + + return obj +} + +/** + * Get content-type from req/res objects. + * + * @param {object} + * @return {Object} + * @private + */ + +function getcontenttype (obj) { + var header + + if (typeof obj.getHeader === 'function') { + // res-like + header = obj.getHeader('content-type') + } else if (typeof obj.headers === 'object') { + // req-like + header = obj.headers && obj.headers['content-type'] + } + + if (typeof header !== 'string') { + throw new TypeError('content-type header is missing from object') + } + + return header +} + +/** + * Quote a string if necessary. + * + * @param {string} val + * @return {string} + * @private + */ + +function qstring (val) { + var str = String(val) + + // no need to quote tokens + if (TOKEN_REGEXP.test(str)) { + return str + } + + if (str.length > 0 && !TEXT_REGEXP.test(str)) { + throw new TypeError('invalid parameter value') + } + + return '"' + str.replace(QUOTE_REGEXP, '\\$1') + '"' +} + +/** + * Class to represent a content type. 
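+ * Holds the lower-cased media type and a null-prototype parameters object.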
+ * @private + */ +function ContentType (type) { + this.parameters = Object.create(null) + this.type = type +} diff --git a/scorpio-broker/http_server/node_modules/content-type/package.json b/scorpio-broker/http_server/node_modules/content-type/package.json new file mode 100644 index 0000000000000000000000000000000000000000..9db19f63fb96592d8d3bced654a72d47c12cef97 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/content-type/package.json @@ -0,0 +1,42 @@ +{ + "name": "content-type", + "description": "Create and parse HTTP Content-Type header", + "version": "1.0.5", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "content-type", + "http", + "req", + "res", + "rfc7231" + ], + "repository": "jshttp/content-type", + "devDependencies": { + "deep-equal": "1.0.1", + "eslint": "8.32.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.27.5", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "6.1.1", + "eslint-plugin-standard": "4.1.0", + "mocha": "10.2.0", + "nyc": "15.1.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec --check-leaks --bail test/", + "test-ci": "nyc --reporter=lcovonly --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/scorpio-broker/http_server/node_modules/cookie-signature/.npmignore b/scorpio-broker/http_server/node_modules/cookie-signature/.npmignore new file mode 100644 index 0000000000000000000000000000000000000000..f1250e584c94b80208b61cf7cae29db8e486a5c7 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie-signature/.npmignore @@ -0,0 +1,4 @@ +support +test +examples +*.sock diff --git a/scorpio-broker/http_server/node_modules/cookie-signature/History.md b/scorpio-broker/http_server/node_modules/cookie-signature/History.md new file mode 100644 index 0000000000000000000000000000000000000000..78513cc3d28ce3516c93b4d425f83df247486ae5 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie-signature/History.md @@ -0,0 +1,38 @@ +1.0.6 / 2015-02-03 +================== + +* use `npm test` instead of `make test` to run tests +* clearer assertion messages when checking input + + +1.0.5 / 2014-09-05 +================== + +* add license to package.json + +1.0.4 / 2014-06-25 +================== + + * corrected avoidance of timing attacks (thanks @tenbits!) + +1.0.3 / 2014-01-28 +================== + + * [incorrect] fix for timing attacks + +1.0.2 / 2014-01-28 +================== + + * fix missing repository warning + * fix typo in test + +1.0.1 / 2013-04-15 +================== + + * Revert "Changed underlying HMAC algo. to sha512." + * Revert "Fix for timing attacks on MAC verification." + +0.0.1 / 2010-01-03 +================== + + * Initial release diff --git a/scorpio-broker/http_server/node_modules/cookie-signature/Readme.md b/scorpio-broker/http_server/node_modules/cookie-signature/Readme.md new file mode 100644 index 0000000000000000000000000000000000000000..2559e841b02edfdc128176bfbdc0b938209a99ea --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie-signature/Readme.md @@ -0,0 +1,42 @@ + +# cookie-signature + + Sign and unsign cookies. 
+ +## Example + +```js +var cookie = require('cookie-signature'); + +var val = cookie.sign('hello', 'tobiiscool'); +val.should.equal('hello.DGDUkGlIkCzPz+C0B064FNgHdEjox7ch8tOBGslZ5QI'); + +var val = cookie.sign('hello', 'tobiiscool'); +cookie.unsign(val, 'tobiiscool').should.equal('hello'); +cookie.unsign(val, 'luna').should.be.false; +``` + +## License + +(The MIT License) + +Copyright (c) 2012 LearnBoost <tj@learnboost.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/scorpio-broker/http_server/node_modules/cookie-signature/index.js b/scorpio-broker/http_server/node_modules/cookie-signature/index.js new file mode 100644 index 0000000000000000000000000000000000000000..b8c9463a238b7ec090ff9090234e3f34322a36df --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie-signature/index.js @@ -0,0 +1,51 @@ +/** + * Module dependencies. + */ + +var crypto = require('crypto'); + +/** + * Sign the given `val` with `secret`. + * + * @param {String} val + * @param {String} secret + * @return {String} + * @api private + */ + +exports.sign = function(val, secret){ + if ('string' != typeof val) throw new TypeError("Cookie value must be provided as a string."); + if ('string' != typeof secret) throw new TypeError("Secret string must be provided."); + return val + '.' + crypto + .createHmac('sha256', secret) + .update(val) + .digest('base64') + .replace(/\=+$/, ''); +}; + +/** + * Unsign and decode the given `val` with `secret`, + * returning `false` if the signature is invalid. + * + * @param {String} val + * @param {String} secret + * @return {String|Boolean} + * @api private + */ + +exports.unsign = function(val, secret){ + if ('string' != typeof val) throw new TypeError("Signed cookie string must be provided."); + if ('string' != typeof secret) throw new TypeError("Secret string must be provided."); + var str = val.slice(0, val.lastIndexOf('.')) + , mac = exports.sign(str, secret); + + return sha1(mac) == sha1(val) ? 
str : false; +}; + +/** + * Private + */ + +function sha1(str){ + return crypto.createHash('sha1').update(str).digest('hex'); +} diff --git a/scorpio-broker/http_server/node_modules/cookie-signature/package.json b/scorpio-broker/http_server/node_modules/cookie-signature/package.json new file mode 100644 index 0000000000000000000000000000000000000000..29c4498e07ab1ae43692d7a27f959771a459815c --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie-signature/package.json @@ -0,0 +1,18 @@ +{ + "name": "cookie-signature", + "version": "1.0.6", + "description": "Sign and unsign cookies", + "keywords": ["cookie", "sign", "unsign"], + "author": "TJ Holowaychuk ", + "license": "MIT", + "repository": { "type": "git", "url": "https://github.com/visionmedia/node-cookie-signature.git"}, + "dependencies": {}, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "scripts": { + "test": "mocha --require should --reporter spec" + }, + "main": "index" +} diff --git a/scorpio-broker/http_server/node_modules/cookie/LICENSE b/scorpio-broker/http_server/node_modules/cookie/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..058b6b4efa3f45896ae691f2558a2a1aca05bebd --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012-2014 Roman Shtylman +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/scorpio-broker/http_server/node_modules/cookie/README.md b/scorpio-broker/http_server/node_modules/cookie/README.md new file mode 100644 index 0000000000000000000000000000000000000000..71fdac1110bba222a716c4b56b90028a6a9e5af4 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie/README.md @@ -0,0 +1,317 @@ +# cookie + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +Basic HTTP cookie parser and serializer for HTTP servers. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). 
Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install cookie +``` + +## API + +```js +var cookie = require('cookie'); +``` + +### cookie.parse(str, options) + +Parse an HTTP `Cookie` header string and returning an object of all cookie name-value pairs. +The `str` argument is the string representing a `Cookie` header value and `options` is an +optional object containing additional parsing options. + +```js +var cookies = cookie.parse('foo=bar; equation=E%3Dmc%5E2'); +// { foo: 'bar', equation: 'E=mc^2' } +``` + +#### Options + +`cookie.parse` accepts these properties in the options object. + +##### decode + +Specifies a function that will be used to decode a cookie's value. Since the value of a cookie +has a limited character set (and must be a simple string), this function can be used to decode +a previously-encoded cookie value into a JavaScript string or other object. + +The default function is the global `decodeURIComponent`, which will decode any URL-encoded +sequences into their byte representations. + +**note** if an error is thrown from this function, the original, non-decoded cookie value will +be returned as the cookie's value. + +### cookie.serialize(name, value, options) + +Serialize a cookie name-value pair into a `Set-Cookie` header string. The `name` argument is the +name for the cookie, the `value` argument is the value to set the cookie to, and the `options` +argument is an optional object containing additional serialization options. + +```js +var setCookie = cookie.serialize('foo', 'bar'); +// foo=bar +``` + +#### Options + +`cookie.serialize` accepts these properties in the options object. + +##### domain + +Specifies the value for the [`Domain` `Set-Cookie` attribute][rfc-6265-5.2.3]. By default, no +domain is set, and most clients will consider the cookie to apply to only the current domain. + +##### encode + +Specifies a function that will be used to encode a cookie's value. Since value of a cookie +has a limited character set (and must be a simple string), this function can be used to encode +a value into a string suited for a cookie's value. + +The default function is the global `encodeURIComponent`, which will encode a JavaScript string +into UTF-8 byte sequences and then URL-encode any that fall outside of the cookie range. + +##### expires + +Specifies the `Date` object to be the value for the [`Expires` `Set-Cookie` attribute][rfc-6265-5.2.1]. +By default, no expiration is set, and most clients will consider this a "non-persistent cookie" and +will delete it on a condition like exiting a web browser application. + +**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and +`maxAge` are set, then `maxAge` takes precedence, but it is possible not all clients by obey this, +so if both are set, they should point to the same date and time. + +##### httpOnly + +Specifies the `boolean` value for the [`HttpOnly` `Set-Cookie` attribute][rfc-6265-5.2.6]. When truthy, +the `HttpOnly` attribute is set, otherwise it is not. By default, the `HttpOnly` attribute is not set. + +**note** be careful when setting this to `true`, as compliant clients will not allow client-side +JavaScript to see the cookie in `document.cookie`. + +##### maxAge + +Specifies the `number` (in seconds) to be the value for the [`Max-Age` `Set-Cookie` attribute][rfc-6265-5.2.2]. +The given number will be converted to an integer by rounding down. 
By default, no maximum age is set. + +**note** the [cookie storage model specification][rfc-6265-5.3] states that if both `expires` and +`maxAge` are set, then `maxAge` takes precedence, but it is possible not all clients by obey this, +so if both are set, they should point to the same date and time. + +##### partitioned + +Specifies the `boolean` value for the [`Partitioned` `Set-Cookie`](rfc-cutler-httpbis-partitioned-cookies) +attribute. When truthy, the `Partitioned` attribute is set, otherwise it is not. By default, the +`Partitioned` attribute is not set. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +More information about can be found in [the proposal](https://github.com/privacycg/CHIPS). + +##### path + +Specifies the value for the [`Path` `Set-Cookie` attribute][rfc-6265-5.2.4]. By default, the path +is considered the ["default path"][rfc-6265-5.1.4]. + +##### priority + +Specifies the `string` to be the value for the [`Priority` `Set-Cookie` attribute][rfc-west-cookie-priority-00-4.1]. + + - `'low'` will set the `Priority` attribute to `Low`. + - `'medium'` will set the `Priority` attribute to `Medium`, the default priority when not set. + - `'high'` will set the `Priority` attribute to `High`. + +More information about the different priority levels can be found in +[the specification][rfc-west-cookie-priority-00-4.1]. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### sameSite + +Specifies the `boolean` or `string` to be the value for the [`SameSite` `Set-Cookie` attribute][rfc-6265bis-09-5.4.7]. + + - `true` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + - `false` will not set the `SameSite` attribute. + - `'lax'` will set the `SameSite` attribute to `Lax` for lax same site enforcement. + - `'none'` will set the `SameSite` attribute to `None` for an explicit cross-site cookie. + - `'strict'` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + +More information about the different enforcement levels can be found in +[the specification][rfc-6265bis-09-5.4.7]. + +**note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### secure + +Specifies the `boolean` value for the [`Secure` `Set-Cookie` attribute][rfc-6265-5.2.5]. When truthy, +the `Secure` attribute is set, otherwise it is not. By default, the `Secure` attribute is not set. + +**note** be careful when setting this to `true`, as compliant clients will not send the cookie back to +the server in the future if the browser does not have an HTTPS connection. + +## Example + +The following example uses this module in conjunction with the Node.js core HTTP server +to prompt a user for their name and display it back on future visits. 
+ +```js +var cookie = require('cookie'); +var escapeHtml = require('escape-html'); +var http = require('http'); +var url = require('url'); + +function onRequest(req, res) { + // Parse the query string + var query = url.parse(req.url, true, true).query; + + if (query && query.name) { + // Set a new cookie with the name + res.setHeader('Set-Cookie', cookie.serialize('name', String(query.name), { + httpOnly: true, + maxAge: 60 * 60 * 24 * 7 // 1 week + })); + + // Redirect back after setting cookie + res.statusCode = 302; + res.setHeader('Location', req.headers.referer || '/'); + res.end(); + return; + } + + // Parse the cookies on the request + var cookies = cookie.parse(req.headers.cookie || ''); + + // Get the visitor name set in the cookie + var name = cookies.name; + + res.setHeader('Content-Type', 'text/html; charset=UTF-8'); + + if (name) { + res.write('

<p>Welcome back, <b>' + escapeHtml(name) + '</b>!</p>'); + } else { + res.write('<p>Hello, new visitor!</p>'); + } + + res.write('<form method="GET">'); + res.write('<input placeholder="enter your name" name="name"> <input type="submit" value="Set Name">'); + res.end('</form>
'); +} + +http.createServer(onRequest).listen(3000); +``` + +## Testing + +```sh +$ npm test +``` + +## Benchmark + +``` +$ npm run bench + +> cookie@0.5.0 bench +> node benchmark/index.js + + node@18.18.2 + acorn@8.10.0 + ada@2.6.0 + ares@1.19.1 + brotli@1.0.9 + cldr@43.1 + icu@73.2 + llhttp@6.0.11 + modules@108 + napi@9 + nghttp2@1.57.0 + nghttp3@0.7.0 + ngtcp2@0.8.1 + openssl@3.0.10+quic + simdutf@3.2.14 + tz@2023c + undici@5.26.3 + unicode@15.0 + uv@1.44.2 + uvwasi@0.0.18 + v8@10.2.154.26-node.26 + zlib@1.2.13.1-motley + +> node benchmark/parse-top.js + + cookie.parse - top sites + + 14 tests completed. + + parse accounts.google.com x 2,588,913 ops/sec ±0.74% (186 runs sampled) + parse apple.com x 2,370,002 ops/sec ±0.69% (186 runs sampled) + parse cloudflare.com x 2,213,102 ops/sec ±0.88% (188 runs sampled) + parse docs.google.com x 2,194,157 ops/sec ±1.03% (184 runs sampled) + parse drive.google.com x 2,265,084 ops/sec ±0.79% (187 runs sampled) + parse en.wikipedia.org x 457,099 ops/sec ±0.81% (186 runs sampled) + parse linkedin.com x 504,407 ops/sec ±0.89% (186 runs sampled) + parse maps.google.com x 1,230,959 ops/sec ±0.98% (186 runs sampled) + parse microsoft.com x 926,294 ops/sec ±0.88% (184 runs sampled) + parse play.google.com x 2,311,338 ops/sec ±0.83% (185 runs sampled) + parse support.google.com x 1,508,850 ops/sec ±0.86% (186 runs sampled) + parse www.google.com x 1,022,582 ops/sec ±1.32% (182 runs sampled) + parse youtu.be x 332,136 ops/sec ±1.02% (185 runs sampled) + parse youtube.com x 323,833 ops/sec ±0.77% (183 runs sampled) + +> node benchmark/parse.js + + cookie.parse - generic + + 6 tests completed. + + simple x 3,214,032 ops/sec ±1.61% (183 runs sampled) + decode x 587,237 ops/sec ±1.16% (187 runs sampled) + unquote x 2,954,618 ops/sec ±1.35% (183 runs sampled) + duplicates x 857,008 ops/sec ±0.89% (187 runs sampled) + 10 cookies x 292,133 ops/sec ±0.89% (187 runs sampled) + 100 cookies x 22,610 ops/sec ±0.68% (187 runs sampled) +``` + +## References + +- [RFC 6265: HTTP State Management Mechanism][rfc-6265] +- [Same-site Cookies][rfc-6265bis-09-5.4.7] + +[rfc-cutler-httpbis-partitioned-cookies]: https://tools.ietf.org/html/draft-cutler-httpbis-partitioned-cookies/ +[rfc-west-cookie-priority-00-4.1]: https://tools.ietf.org/html/draft-west-cookie-priority-00#section-4.1 +[rfc-6265bis-09-5.4.7]: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-09#section-5.4.7 +[rfc-6265]: https://tools.ietf.org/html/rfc6265 +[rfc-6265-5.1.4]: https://tools.ietf.org/html/rfc6265#section-5.1.4 +[rfc-6265-5.2.1]: https://tools.ietf.org/html/rfc6265#section-5.2.1 +[rfc-6265-5.2.2]: https://tools.ietf.org/html/rfc6265#section-5.2.2 +[rfc-6265-5.2.3]: https://tools.ietf.org/html/rfc6265#section-5.2.3 +[rfc-6265-5.2.4]: https://tools.ietf.org/html/rfc6265#section-5.2.4 +[rfc-6265-5.2.5]: https://tools.ietf.org/html/rfc6265#section-5.2.5 +[rfc-6265-5.2.6]: https://tools.ietf.org/html/rfc6265#section-5.2.6 +[rfc-6265-5.3]: https://tools.ietf.org/html/rfc6265#section-5.3 + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/cookie/master?label=ci +[ci-url]: https://github.com/jshttp/cookie/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/cookie/master +[coveralls-url]: https://coveralls.io/r/jshttp/cookie?branch=master +[node-image]: https://badgen.net/npm/node/cookie +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/cookie +[npm-url]: https://npmjs.org/package/cookie 
+[npm-version-image]: https://badgen.net/npm/v/cookie diff --git a/scorpio-broker/http_server/node_modules/cookie/SECURITY.md b/scorpio-broker/http_server/node_modules/cookie/SECURITY.md new file mode 100644 index 0000000000000000000000000000000000000000..fd4a6c53a9cd1abacf91125dab3fde3163b4c412 --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie/SECURITY.md @@ -0,0 +1,25 @@ +# Security Policies and Procedures + +## Reporting a Bug + +The `cookie` team and community take all security bugs seriously. Thank +you for improving the security of the project. We appreciate your efforts and +responsible disclosure and will make every effort to acknowledge your +contributions. + +Report security bugs by emailing the current owner(s) of `cookie`. This +information can be found in the npm registry using the command +`npm owner ls cookie`. +If unsure or unable to get the information from the above, open an issue +in the [project issue tracker](https://github.com/jshttp/cookie/issues) +asking for the current contact information. + +To ensure the timely response to your report, please ensure that the entirety +of the report is contained within the email body and not solely behind a web +link or an attachment. + +At least one owner will acknowledge your email within 48 hours, and will send a +more detailed response within 48 hours indicating the next steps in handling +your report. After the initial reply to your report, the owners will +endeavor to keep you informed of the progress towards a fix and full +announcement, and may ask for additional information or guidance. diff --git a/scorpio-broker/http_server/node_modules/cookie/index.js b/scorpio-broker/http_server/node_modules/cookie/index.js new file mode 100644 index 0000000000000000000000000000000000000000..51a58cbe953d5914b2c6151228763eb2e62ea26e --- /dev/null +++ b/scorpio-broker/http_server/node_modules/cookie/index.js @@ -0,0 +1,334 @@ +/*! + * cookie + * Copyright(c) 2012-2014 Roman Shtylman + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module exports. + * @public + */ + +exports.parse = parse; +exports.serialize = serialize; + +/** + * Module variables. + * @private + */ + +var __toString = Object.prototype.toString + +/** + * RegExp to match cookie-name in RFC 6265 sec 4.1.1 + * This refers out to the obsoleted definition of token in RFC 2616 sec 2.2 + * which has been replaced by the token definition in RFC 7230 appendix B. + * + * cookie-name = token + * token = 1*tchar + * tchar = "!" / "#" / "$" / "%" / "&" / "'" / + * "*" / "+" / "-" / "." / "^" / "_" / + * "`" / "|" / "~" / DIGIT / ALPHA + */ + +var cookieNameRegExp = /^[!#$%&'*+\-.^_`|~0-9A-Za-z]+$/; + +/** + * RegExp to match cookie-value in RFC 6265 sec 4.1.1 + * + * cookie-value = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE ) + * cookie-octet = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E + * ; US-ASCII characters excluding CTLs, + * ; whitespace DQUOTE, comma, semicolon, + * ; and backslash + */ + +var cookieValueRegExp = /^("?)[\u0021\u0023-\u002B\u002D-\u003A\u003C-\u005B\u005D-\u007E]*\1$/; + +/** + * RegExp to match domain-value in RFC 6265 sec 4.1.1 + * + * domain-value = + * ; defined in [RFC1034], Section 3.5, as + * ; enhanced by [RFC1123], Section 2.1 + * =